();
+ getCheckInOutService().checkin( target.getNode(), versionProperties, null, false);
// Check if there are any file/directory change notify requests active
@@ -183,4 +198,23 @@ public class CheckInOutDesktopAction extends DesktopAction {
return response;
}
+
+ /**
+ * Get the check in/out service
+ *
+ * @return CheckOutCheckInService
+ */
+ protected final CheckOutCheckInService getCheckInOutService()
+ {
+ // Check if the service has been cached
+
+ if ( m_checkInOutService == null)
+ {
+ m_checkInOutService = getServiceRegistry().getCheckOutCheckInService();
+ }
+
+ // Return the check in/out service
+
+ return m_checkInOutService;
+ }
}
diff --git a/source/java/org/alfresco/filesys/smb/server/repo/desk/JavaScriptDesktopAction.java b/source/java/org/alfresco/filesys/smb/server/repo/desk/JavaScriptDesktopAction.java
new file mode 100644
index 0000000000..303ff36272
--- /dev/null
+++ b/source/java/org/alfresco/filesys/smb/server/repo/desk/JavaScriptDesktopAction.java
@@ -0,0 +1,448 @@
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+
+package org.alfresco.filesys.smb.server.repo.desk;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.StringTokenizer;
+
+import org.alfresco.config.ConfigElement;
+import org.alfresco.filesys.server.filesys.DiskSharedDevice;
+import org.alfresco.filesys.smb.server.repo.DesktopAction;
+import org.alfresco.filesys.smb.server.repo.DesktopActionException;
+import org.alfresco.filesys.smb.server.repo.DesktopParams;
+import org.alfresco.filesys.smb.server.repo.DesktopResponse;
+import org.alfresco.service.cmr.repository.ScriptException;
+import org.alfresco.service.cmr.repository.ScriptService;
+
+/**
+ * Javascript Desktop Action Class
+ *
+ * Run a server-side script against the target node(s).
+ *
+ * @author gkspencer
+ */
+public class JavaScriptDesktopAction extends DesktopAction {
+
+ // Script service
+
+ private ScriptService m_scriptService;
+
+ // Script name
+
+ private String m_scriptName;
+
+ // Script file details
+
+ private String m_scriptPath;
+ private long m_lastModified;
+
+ // Script string
+
+ private String m_script;
+
+ /**
+ * Class constructor
+ */
+ public JavaScriptDesktopAction()
+ {
+ super( DesktopAction.AttrAnyFiles, DesktopAction.PreConfirmAction + DesktopAction.PreCopyToTarget);
+ }
+
+ /**
+ * Return the confirmation string to be displayed by the client
+ *
+ * @return String
+ */
+ @Override
+ public String getConfirmationString()
+ {
+ return "Run Javascript action";
+ }
+
+ /**
+ * Initialize the action
+ *
+ * @param global ConfigElement
+ * @param config ConfigElement
+ * @param fileSys DiskSharedDevice
+ * @exception DesktopActionException
+ */
+ @Override
+ public void initializeAction(ConfigElement global, ConfigElement config, DiskSharedDevice fileSys)
+ throws DesktopActionException
+ {
+
+ // Perform standard initialization
+
+ super.initializeAction(global, config, fileSys);
+
+ // Get the script file name and check that it exists
+
+ ConfigElement elem = config.getChild("script");
+ if ( elem != null && elem.getValue().length() > 0)
+ {
+ // Set the script name
+
+ setScriptName(elem.getValue());
+
+ // Check if the script exists on the classpath
+
+ URL scriptURL = this.getClass().getClassLoader().getResource(getScriptName());
+ if ( scriptURL == null)
+ throw new DesktopActionException("Failed to find script on classpath, " + getScriptName());
+
+ // Decode the URL path, it might contain escaped characters
+
+ String scriptURLPath = null;
+ try
+ {
+ scriptURLPath = URLDecoder.decode( scriptURL.getFile(), "UTF-8");
+ }
+ catch ( UnsupportedEncodingException ex)
+ {
+ throw new DesktopActionException("Failed to decode script path, " + ex.getMessage());
+ }
+
+ // Check that the script file exists
+
+ File scriptFile = new File(scriptURLPath);
+ if ( scriptFile.exists() == false)
+ throw new DesktopActionException("Script file not found, " + elem.getValue());
+
+ m_scriptPath = scriptFile.getAbsolutePath();
+ m_lastModified = scriptFile.lastModified();
+
+ // Load the script
+
+ try
+ {
+ loadScript( scriptFile);
+ }
+ catch ( IOException ex)
+ {
+ throw new DesktopActionException( "Failed to load script, " + ex.getMessage());
+ }
+ }
+ else
+ throw new DesktopActionException("Script name not specified");
+
+ // check if the desktop action attributes have been specified
+
+ elem = config.getChild("attributes");
+ if ( elem != null)
+ {
+ // Check if the attribute string is empty
+
+ if ( elem.getValue().length() == 0)
+ throw new DesktopActionException("Empty desktop action attributes");
+
+ // Parse the attribute string
+
+ int attr = 0;
+ StringTokenizer tokens = new StringTokenizer( elem.getValue(), ",");
+
+ while ( tokens.hasMoreTokens())
+ {
+ // Get the current attribute token and validate
+
+ String token = tokens.nextToken().trim();
+
+ if ( token.equalsIgnoreCase( "targetFiles"))
+ attr |= AttrTargetFiles;
+ else if ( token.equalsIgnoreCase( "targetFolders"))
+ attr |= AttrTargetFolders;
+ else if ( token.equalsIgnoreCase( "clientFiles"))
+ attr |= AttrClientFiles;
+ else if ( token.equalsIgnoreCase( "clientFolders"))
+ attr |= AttrClientFolders;
+ else if ( token.equalsIgnoreCase( "alfrescoFiles"))
+ attr |= AttrAlfrescoFiles;
+ else if ( token.equalsIgnoreCase( "alfrescoFolders"))
+ attr |= AttrAlfrescoFolders;
+ else if ( token.equalsIgnoreCase( "multiplePaths"))
+ attr |= AttrMultiplePaths;
+ else if ( token.equalsIgnoreCase( "allowNoParams"))
+ attr |= AttrAllowNoParams;
+ else if ( token.equalsIgnoreCase( "anyFiles"))
+ attr |= AttrAnyFiles;
+ else if ( token.equalsIgnoreCase( "anyFolders"))
+ attr |= AttrAnyFolders;
+ else if ( token.equalsIgnoreCase( "anyFilesFolders"))
+ attr |= AttrAnyFilesFolders;
+ else
+ throw new DesktopActionException("Unknown attribute, " + token);
+ }
+
+ // Set the action attributes
+
+ setAttributes( attr);
+ }
+
+ // Check if the desktop action pre-processing options have been specified
+
+ elem = config.getChild("preprocess");
+ if ( elem != null)
+ {
+ // Check if the pre-process string is empty
+
+ if ( elem.getValue().length() == 0)
+ throw new DesktopActionException("Empty desktop action pre-processing flags");
+
+ // Parse the pre-process string
+
+ int pre = 0;
+ StringTokenizer tokens = new StringTokenizer( elem.getValue(), ",");
+
+ while ( tokens.hasMoreTokens())
+ {
+ // Get the current pre-process token and validate
+
+ String token = tokens.nextToken().trim();
+
+ if ( token.equalsIgnoreCase( "copyToTarget"))
+ pre |= PreCopyToTarget;
+ else if ( token.equalsIgnoreCase( "confirm"))
+ pre |= PreConfirmAction;
+ else if ( token.equalsIgnoreCase( "localToWorkingCopy"))
+ pre |= PreLocalToWorkingCopy;
+ else
+ throw new DesktopActionException("Unknown pre-processing flag, " + token);
+ }
+
+ // Set the action pre-processing flags
+
+ setPreProcessActions( pre);
+ }
+ }
+
+ /**
+ * Run the desktop action
+ *
+ * @param params DesktopParams
+ * @return DesktopResponse
+ */
+ @Override
+ public DesktopResponse runAction(DesktopParams params)
+ throws DesktopActionException
+ {
+ // Check if the script file has been changed
+
+ DesktopResponse response = new DesktopResponse(StsSuccess);
+
+ File scriptFile = new File(m_scriptPath);
+ if ( scriptFile.lastModified() != m_lastModified)
+ {
+ // Reload the script
+
+ m_lastModified = scriptFile.lastModified();
+
+ try
+ {
+ loadScript( scriptFile);
+ }
+ catch ( IOException ex)
+ {
+ response.setStatus(StsError, "Failed to reload script file, " + getScriptName());
+ return response;
+ }
+ }
+
+ // Start a transaction
+
+ params.getSession().beginTransaction(getTransactionService(), false);
+
+ // Access the script service
+
+ if ( getScriptService() != null)
+ {
+ // Create the objects to be passed to the script
+
+ Map<String, Object> model = new HashMap<String, Object>();
+ model.put("deskParams", params);
+ model.put("out", System.out);
+
+ // Start a transaction
+
+ params.getSession().beginTransaction(getTransactionService(), false);
+
+ // Run the script
+
+ Object result = null;
+
+ try
+ {
+ // Run the script
+
+ result = getScriptService().executeScriptString( getScript(), model);
+
+ // Check the result
+
+ if ( result != null)
+ {
+ // Check for a full response object
+
+ if ( result instanceof DesktopResponse)
+ {
+ response = (DesktopResponse) result;
+ }
+
+ // Status code only response
+
+ else if ( result instanceof Double)
+ {
+ Double jsSts = (Double) result;
+ response.setStatus( jsSts.intValue(), "");
+ }
+
+ // Encoded response in the format '<status>,<message>'
+
+ else if ( result instanceof String)
+ {
+ String responseMsg = (String) result;
+
+ // Parse the status message
+
+ StringTokenizer token = new StringTokenizer( responseMsg, ",");
+ String stsToken = token.nextToken();
+ String msgToken = token.hasMoreTokens() ? token.nextToken() : null;
+
+ int sts = -1;
+ try
+ {
+ sts = Integer.parseInt( stsToken);
+ }
+ catch ( NumberFormatException ex)
+ {
+ response.setStatus( StsError, "Bad response from script");
+ }
+
+ // Set the response
+
+ response.setStatus( sts, msgToken != null ? msgToken : "");
+ }
+ }
+ }
+ catch (ScriptException ex)
+ {
+ // Set the error response for the client
+
+ response.setStatus( StsError, ex.getMessage());
+ }
+ }
+ else
+ {
+ // Return an error response, script service not available
+
+ response.setStatus( StsError, "Script service not available");
+ }
+
+ // Return the response
+
+ return response;
+ }
+
+ /**
+ * Get the script service
+ *
+ * @return ScriptService
+ */
+ protected final ScriptService getScriptService()
+ {
+ // Check if the script service has been initialized
+
+ if ( m_scriptService == null)
+ {
+ // Get the script service
+
+ m_scriptService = getServiceRegistry().getScriptService();
+ }
+
+ // Return the script service
+
+ return m_scriptService;
+ }
+
+ /**
+ * Get the script name
+ *
+ * @return String
+ */
+ public final String getScriptName()
+ {
+ return m_scriptName;
+ }
+
+ /**
+ * Return the script data
+ *
+ * @return String
+ */
+ public final String getScript()
+ {
+ return m_script;
+ }
+
+ /**
+ * Set the script name
+ *
+ * @param name String
+ */
+ protected final void setScriptName(String name)
+ {
+ m_scriptName = name;
+ }
+
+ /**
+ * Load, or reload, the script
+ *
+ * @param scriptFile File
+ */
+ private final void loadScript(File scriptFile)
+ throws IOException
+ {
+ // Open the script file
+
+ BufferedReader scriptIn = new BufferedReader(new FileReader( scriptFile));
+ StringBuilder scriptStr = new StringBuilder((int) scriptFile.length() + 256);
+
+ String inRec = scriptIn.readLine();
+
+ while ( inRec != null)
+ {
+ scriptStr.append( inRec);
+ scriptStr.append( "\n");
+ inRec = scriptIn.readLine();
+ }
+
+ // Close the script file
+
+ scriptIn.close();
+
+ // Update the script string
+
+ m_script = scriptStr.toString();
+ }
+}
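
A minimal, self-contained sketch of the "status,message" result form that runAction() above accepts from a script, using only java.util.StringTokenizer; the class name and sample values here are illustrative and not part of the patch.

    import java.util.StringTokenizer;

    public class ScriptResultExample
    {
        public static void main(String[] args)
        {
            String responseMsg = "0,Checked in 3 files";    // example script result

            StringTokenizer tokens = new StringTokenizer(responseMsg, ",");
            String stsToken = tokens.hasMoreTokens() ? tokens.nextToken() : null;
            String msgToken = tokens.hasMoreTokens() ? tokens.nextToken() : "";

            int sts = -1;                                   // default to an error status
            try
            {
                if (stsToken != null)
                    sts = Integer.parseInt(stsToken.trim());
            }
            catch (NumberFormatException ex)
            {
                // keep the error status if the script returned a non-numeric code
            }

            System.out.println("status=" + sts + ", message=" + msgToken);
        }
    }
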
diff --git a/source/java/org/alfresco/filesys/smb/server/repo/pseudo/ContentPseudoFileImpl.java b/source/java/org/alfresco/filesys/smb/server/repo/pseudo/ContentPseudoFileImpl.java
index 24f8e08896..ab38534a88 100644
--- a/source/java/org/alfresco/filesys/smb/server/repo/pseudo/ContentPseudoFileImpl.java
+++ b/source/java/org/alfresco/filesys/smb/server/repo/pseudo/ContentPseudoFileImpl.java
@@ -68,6 +68,27 @@ public class ContentPseudoFileImpl implements PseudoFileInterface
if ( pfile != null)
isPseudo = true;
}
+ else
+ {
+ // Check if the file name matches a pseudo-file name in the desktop actions list
+
+ if ( ctx.hasDesktopActions())
+ {
+ DesktopActionTable actions = ctx.getDesktopActions();
+ if ( actions.getActionViaPseudoName( paths[1]) != null)
+ isPseudo = true;
+ }
+
+ // Check if the URL file is enabled
+
+ if ( isPseudo == false && ctx.hasURLFile())
+ {
+ // Check if it is the URL file name
+
+ if ( ctx.getURLFileName().equals( paths[1]))
+ isPseudo = true;
+ }
+ }
// Return the pseudo file status
diff --git a/source/java/org/alfresco/filesys/util/DataBuffer.java b/source/java/org/alfresco/filesys/util/DataBuffer.java
index 8f1b8b5963..d016ea4830 100644
--- a/source/java/org/alfresco/filesys/util/DataBuffer.java
+++ b/source/java/org/alfresco/filesys/util/DataBuffer.java
@@ -570,7 +570,9 @@ public class DataBuffer
// Check if there is enough space in the buffer
int bytLen = str.length() * 2;
- if (m_data.length - m_pos < bytLen)
+ if ( nulTerm)
+ bytLen += 2;
+ if ((m_data.length - m_pos) < (bytLen + 4))
extendBuffer(bytLen + 4);
// Word align the buffer position, pack the Unicode string
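
A standalone sketch of the size calculation the fix above performs when packing a Unicode string: two bytes per UTF-16 code unit, two more for the optional null terminator, and up to one byte of padding to word-align the write position (the extra slack of 4 bytes checked in the patch comfortably covers this). Names are illustrative; this is not DataBuffer code.

    public class UnicodePackSizeExample
    {
        // Bytes needed to pack a UTF-16 string at buffer offset pos
        public static int requiredBytes(String str, boolean nulTerm, int pos)
        {
            int bytLen = str.length() * 2;          // two bytes per UTF-16 code unit
            if (nulTerm)
                bytLen += 2;                        // 16-bit null terminator
            int align = (pos % 2 != 0) ? 1 : 0;     // word-align the write position
            return bytLen + align;
        }

        public static void main(String[] args)
        {
            System.out.println(requiredBytes("readme.txt", true, 3));   // 20 + 2 + 1 = 23
        }
    }
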
diff --git a/source/java/org/alfresco/jcr/repository/RepositoryImpl.java b/source/java/org/alfresco/jcr/repository/RepositoryImpl.java
index 5b09ad4a9b..9777322502 100644
--- a/source/java/org/alfresco/jcr/repository/RepositoryImpl.java
+++ b/source/java/org/alfresco/jcr/repository/RepositoryImpl.java
@@ -201,7 +201,6 @@ public class RepositoryImpl implements Repository
{
// construct the session
SessionImpl sessionImpl = new SessionImpl(this);
- registerSession(sessionImpl);
// authenticate user
AuthenticationService authenticationService = getServiceRegistry().getAuthenticationService();
@@ -211,7 +210,6 @@ public class RepositoryImpl implements Repository
}
catch(AuthenticationException e)
{
- deregisterSession();
throw new LoginException("Alfresco Repository failed to authenticate credentials", e);
}
@@ -222,11 +220,11 @@ public class RepositoryImpl implements Repository
// session is now ready
Session session = sessionImpl.getProxy();
+ registerSession(sessionImpl);
return session;
}
catch(AlfrescoRuntimeException e)
{
- deregisterSession();
throw new RepositoryException(e);
}
}
diff --git a/source/java/org/alfresco/model/ContentModel.java b/source/java/org/alfresco/model/ContentModel.java
index c3cd5e5397..7873190322 100644
--- a/source/java/org/alfresco/model/ContentModel.java
+++ b/source/java/org/alfresco/model/ContentModel.java
@@ -58,16 +58,16 @@ public interface ContentModel
// referenceable aspect constants
static final QName TYPE_REFERENCE = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "reference");
static final QName PROP_REFERENCE = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "reference");
-
+
// container type constants
static final QName TYPE_CONTAINER = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "container");
/** child association type supported by {@link #TYPE_CONTAINER} */
static final QName ASSOC_CHILDREN =QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "children");
-
+
// roots
static final QName ASPECT_ROOT = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "aspect_root");
static final QName TYPE_STOREROOT = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "store_root");
-
+
// descriptor properties
static final QName PROP_SYS_VERSION_MAJOR = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "versionMajor");
static final QName PROP_SYS_VERSION_MINOR = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "versionMinor");
@@ -76,7 +76,7 @@ public interface ContentModel
static final QName PROP_SYS_VERSION_BUILD = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "versionBuild");
static final QName PROP_SYS_VERSION_SCHEMA = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "versionSchema");
static final QName PROP_SYS_VERSION_EDITION = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "versionEdition");
-
+
//
// Content Model Definitions
@@ -89,7 +89,7 @@ public interface ContentModel
// copy aspect constants
static final QName ASPECT_COPIEDFROM = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "copiedfrom");
static final QName PROP_COPY_REFERENCE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "source");
-
+
// working copy aspect constants
static final QName ASPECT_WORKING_COPY = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "workingcopy");
static final QName PROP_WORKING_COPY_OWNER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "workingCopyOwner");
@@ -124,19 +124,19 @@ public interface ContentModel
static final QName PROP_CATEGORIES = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categories");
static final QName ASSOC_CATEGORIES = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categories");
static final QName ASSOC_SUBCATEGORIES = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "subcategories");
-
+
// lock aspect
public final static QName ASPECT_LOCKABLE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "lockable");
public final static QName PROP_LOCK_OWNER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "lockOwner");
public final static QName PROP_LOCK_TYPE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "lockType");
public final static QName PROP_EXPIRY_DATE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "expiryDate");
-
+
// version aspect
static final QName ASPECT_VERSIONABLE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "versionable");
static final QName PROP_VERSION_LABEL = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "versionLabel");
static final QName PROP_INITIAL_VERSION = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "initialVersion");
static final QName PROP_AUTO_VERSION = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "autoVersion");
-
+
// folders
static final QName TYPE_SYSTEM_FOLDER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "systemfolder");
static final QName TYPE_FOLDER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "folder");
@@ -151,6 +151,10 @@ public interface ContentModel
static final QName PROP_LASTNAME = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "lastName");
static final QName PROP_EMAIL = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "email");
static final QName PROP_ORGID = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "organizationId");
+ static final QName PROP_HOME_FOLDER_PROVIDER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "homeFolderProvider");
+ static final QName PROP_DEFAULT_HOME_FOLDER_PATH = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "defaultHomeFolderPath");
+
+
// Ownable aspect
static final QName ASPECT_OWNABLE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "ownable");
@@ -190,7 +194,7 @@ public interface ContentModel
public static final QName PROP_HITS = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "hits");
public static final QName PROP_COUNTER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "counter");
- // referencesnode aspect
+ // References Node Aspect.
public static final QName ASPECT_REFERENCES_NODE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "referencesnode");
public static final QName PROP_NODE_REF = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "noderef");
@@ -206,15 +210,15 @@ public interface ContentModel
static final QName PROP_REJECT_STEP = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "rejectStep");
static final QName PROP_REJECT_FOLDER = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "rejectFolder");
static final QName PROP_REJECT_MOVE = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "rejectMove");
-
+
// ui facets aspect
static final QName ASPECT_UIFACETS = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "uifacets");
static final QName PROP_ICON = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "icon");
-
+
// inlineeditable aspect
static final QName ASPECT_INLINEEDITABLE = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "inlineeditable");
static final QName PROP_EDITINLINE = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "editInline");
-
+
// configurable aspect
static final QName ASPECT_CONFIGURABLE = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "configurable");
static final QName TYPE_CONFIGURATIONS = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "configurations");
@@ -254,7 +258,7 @@ public interface ContentModel
static final QName PROP_CREDENTIALS_EXPIRY_DATE = QName.createQName(USER_MODEL_URI, "credentialsExpiryDate");
static final QName PROP_ACCOUNT_LOCKED = QName.createQName(USER_MODEL_URI, "accountLocked");
static final QName PROP_SALT = QName.createQName(USER_MODEL_URI, "salt");
-
+
static final QName TYPE_AUTHORITY = QName.createQName(USER_MODEL_URI, "authority");
static final QName TYPE_AUTHORITY_CONTAINER = QName.createQName(USER_MODEL_URI, "authorityContainer");
diff --git a/source/java/org/alfresco/repo/action/executer/MailActionExecuter.java b/source/java/org/alfresco/repo/action/executer/MailActionExecuter.java
index c256f07916..61a592c240 100644
--- a/source/java/org/alfresco/repo/action/executer/MailActionExecuter.java
+++ b/source/java/org/alfresco/repo/action/executer/MailActionExecuter.java
@@ -109,6 +109,11 @@ public class MailActionExecuter extends ActionExecuterAbstractBase
*/
private ServiceRegistry serviceRegistry;
+ /**
+ * Mail header encoding scheme
+ */
+ private String headerEncoding = null;
+
/**
* @param javaMailSender the java mail sender
*/
@@ -164,6 +169,14 @@ public class MailActionExecuter extends ActionExecuterAbstractBase
{
this.nodeService = nodeService;
}
+
+ /**
+ * @param headerEncoding The mail header encoding to set.
+ */
+ public void setHeaderEncoding(String headerEncoding)
+ {
+ this.headerEncoding = headerEncoding;
+ }
/**
* Execute the rule action
@@ -180,6 +193,12 @@ public class MailActionExecuter extends ActionExecuterAbstractBase
{
MimeMessageHelper message = new MimeMessageHelper(mimeMessage);
+ // set header encoding if one has been supplied
+ if (headerEncoding != null && headerEncoding.length() != 0)
+ {
+ mimeMessage.setHeader("Content-Transfer-Encoding", headerEncoding);
+ }
+
// set recipient
String to = (String)ruleAction.getParameterValue(PARAM_TO);
if (to != null && to.length() != 0)
diff --git a/source/java/org/alfresco/repo/action/executer/ScriptActionExecuter.java b/source/java/org/alfresco/repo/action/executer/ScriptActionExecuter.java
index 2a49156dee..1505941515 100644
--- a/source/java/org/alfresco/repo/action/executer/ScriptActionExecuter.java
+++ b/source/java/org/alfresco/repo/action/executer/ScriptActionExecuter.java
@@ -84,6 +84,10 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
{
NodeRef scriptRef = (NodeRef)action.getParameterValue(PARAM_SCRIPTREF);
NodeRef spaceRef = this.serviceRegistry.getRuleService().getOwningNodeRef(action);
+ if (spaceRef == null)
+ {
+ spaceRef = nodeService.getPrimaryParent(actionedUponNodeRef).getParentRef();
+ }
if (nodeService.exists(scriptRef))
{
diff --git a/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java b/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
index 89c040ca15..28fafbcc91 100644
--- a/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
+++ b/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
@@ -240,7 +240,10 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
contentWriter.setMimetype(mimeType); // new mimetype
contentWriter.setEncoding(contentReader.getEncoding()); // original encoding
- // Try and transform the content
+ // Try and transform the content - failures are caught and allowed to fail silently.
+ // This is unique to this action, and is essentially a broken pattern.
+ // Clients should rather get the exception and then decide to replay with rules/actions turned off or not.
+ // TODO: Check failure patterns for actions.
try
{
doTransform(ruleAction, contentReader, contentWriter);
@@ -258,8 +261,16 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
}
}
+ /**
+ * Executed in a new transaction so that failures don't cause the entire transaction to rollback.
+ */
protected void doTransform(Action ruleAction, ContentReader contentReader, ContentWriter contentWriter)
{
+ // try to pre-empt the lack of a transformer
+ if (!this.contentService.isTransformable(contentReader, contentWriter))
+ {
+ throw new NoTransformerException(contentReader.getMimetype(), contentWriter.getMimetype());
+ }
this.contentService.transform(contentReader, contentWriter);
}
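
A condensed sketch of the failure pattern described in the comments above: probe for a transformer before writing anything, throw if none is available, and let the caller decide whether to swallow the failure. The interface and class names here are stand-ins, not the Alfresco ContentService API.

    public class TransformPatternExample
    {
        interface Transformer
        {
            boolean isTransformable(String sourceMimetype, String targetMimetype);
            void transform(String sourceMimetype, String targetMimetype);
        }

        // Mirrors doTransform(): refuse up front rather than failing half-way through
        static void doTransform(Transformer service, String from, String to)
        {
            if (!service.isTransformable(from, to))
            {
                throw new IllegalStateException("No transformer for " + from + " -> " + to);
            }
            service.transform(from, to);
        }

        // Mirrors the executer: the failure is caught and deliberately swallowed
        static void executeQuietly(Transformer service, String from, String to)
        {
            try
            {
                doTransform(service, from, to);
            }
            catch (RuntimeException e)
            {
                // allowed to fail silently, as the TODO above notes
            }
        }
    }
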
diff --git a/source/java/org/alfresco/repo/audit/AuditComponentImpl.java b/source/java/org/alfresco/repo/audit/AuditComponentImpl.java
index 6ee47e0c6d..240bebf881 100644
--- a/source/java/org/alfresco/repo/audit/AuditComponentImpl.java
+++ b/source/java/org/alfresco/repo/audit/AuditComponentImpl.java
@@ -23,6 +23,7 @@ import java.net.UnknownHostException;
import java.util.Date;
import java.util.List;
+import org.alfresco.repo.audit.model.TrueFalseUnset;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.service.Auditable;
@@ -123,13 +124,26 @@ public class AuditComponentImpl implements AuditComponent
{
if ((auditFlag.get() == null) || (!auditFlag.get().booleanValue()))
{
+ boolean auditInternal = (auditModel.getAuditInternalServiceMethods(mi) == TrueFalseUnset.TRUE);
try
{
- auditFlag.set(Boolean.TRUE);
-
Method method = mi.getMethod();
String methodName = method.getName();
String serviceName = publicServiceIdentifier.getPublicServiceName(mi);
+
+ if (!auditInternal)
+ {
+ auditFlag.set(Boolean.TRUE);
+ }
+ else
+ {
+ if (s_logger.isDebugEnabled())
+ {
+ s_logger.debug("Auditing internal service use for - " + serviceName + "." + methodName);
+ }
+ }
+
+
if (method.isAnnotationPresent(Auditable.class))
{
@@ -170,7 +184,10 @@ public class AuditComponentImpl implements AuditComponent
}
finally
{
- auditFlag.set(Boolean.FALSE);
+ if (!auditInternal)
+ {
+ auditFlag.set(Boolean.FALSE);
+ }
}
}
else
@@ -272,7 +289,7 @@ public class AuditComponentImpl implements AuditComponent
}
else if (returnObject instanceof StoreRef)
{
- auditInfo.setKeyStore((StoreRef)returnObject);
+ auditInfo.setKeyStore((StoreRef) returnObject);
}
}
}
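
A minimal sketch of the thread-local re-entrancy guard the hunk above adjusts, including the new escape hatch for models that want internal service-to-service calls audited as well. All names are illustrative.

    public class AuditGuardExample
    {
        private static final ThreadLocal<Boolean> auditFlag = new ThreadLocal<Boolean>();

        public static void audit(String serviceCall, boolean auditInternal)
        {
            Boolean inAudit = auditFlag.get();
            if (inAudit == null || !inAudit.booleanValue())
            {
                try
                {
                    if (!auditInternal)
                    {
                        // block nested audits triggered by internal service calls
                        auditFlag.set(Boolean.TRUE);
                    }
                    System.out.println("audited: " + serviceCall);
                }
                finally
                {
                    if (!auditInternal)
                    {
                        auditFlag.set(Boolean.FALSE);
                    }
                }
            }
        }
    }
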
diff --git a/source/java/org/alfresco/repo/audit/AuditableAspect.java b/source/java/org/alfresco/repo/audit/AuditableAspect.java
index 19b63ab727..88effcedca 100644
--- a/source/java/org/alfresco/repo/audit/AuditableAspect.java
+++ b/source/java/org/alfresco/repo/audit/AuditableAspect.java
@@ -35,6 +35,7 @@ import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
+import org.alfresco.util.PropertyMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -126,7 +127,7 @@ public class AuditableAspect
public void onAddAudit(NodeRef nodeRef, QName aspect)
{
// Get the current properties
- Map<QName, Serializable> properties = this.nodeService.getProperties(nodeRef);
+ PropertyMap properties = new PropertyMap();
// Set created / updated date
Date now = new Date(System.currentTimeMillis());
@@ -167,7 +168,7 @@ public class AuditableAspect
// Get the current properties
try
{
- Map<QName, Serializable> properties = this.nodeService.getProperties(nodeRef);
+ PropertyMap properties = new PropertyMap();
// Set updated date
Date now = new Date(System.currentTimeMillis());
@@ -249,7 +250,11 @@ public class AuditableAspect
*/
public Boolean doWork() throws Exception
{
- nodeService.setProperties(nodeRef, properties);
+ for (QName propertyQName : properties.keySet())
+ {
+ Serializable property = properties.get(propertyQName);
+ nodeService.setProperty(nodeRef, propertyQName, property);
+ }
return Boolean.TRUE;
}
}
diff --git a/source/java/org/alfresco/repo/audit/MethodAuditModel.java b/source/java/org/alfresco/repo/audit/MethodAuditModel.java
index 37b1200173..62791ca5e6 100644
--- a/source/java/org/alfresco/repo/audit/MethodAuditModel.java
+++ b/source/java/org/alfresco/repo/audit/MethodAuditModel.java
@@ -16,6 +16,7 @@
*/
package org.alfresco.repo.audit;
+import org.alfresco.repo.audit.model.TrueFalseUnset;
import org.aopalliance.intercept.MethodInvocation;
public interface MethodAuditModel
@@ -54,4 +55,11 @@ public interface MethodAuditModel
* @return
*/
public RecordOptions getAuditRecordOptions(MethodInvocation mi);
+
+ /**
+ * Should internal service calls be logged.
+ *
+ * @return
+ */
+ public TrueFalseUnset getAuditInternalServiceMethods(MethodInvocation mi);
}
diff --git a/source/java/org/alfresco/repo/audit/hibernate/Audit.hbm.xml b/source/java/org/alfresco/repo/audit/hibernate/Audit.hbm.xml
index 9e40c7a787..843a9637b7 100644
--- a/source/java/org/alfresco/repo/audit/hibernate/Audit.hbm.xml
+++ b/source/java/org/alfresco/repo/audit/hibernate/Audit.hbm.xml
@@ -42,10 +42,10 @@
-
+
-
+
@@ -67,7 +67,7 @@
-
+
@@ -135,24 +135,34 @@
select
- audit_store
+ audit_store_byid
+ from
+ org.alfresco.repo.audit.hibernate.AuditSourceImpl as audit_store_byid
+ where
+ audit_store_byid =
+ (select max(audit_store.id)
from
org.alfresco.repo.audit.hibernate.AuditSourceImpl as audit_store
where
audit_store.application = :application and
audit_store.service is null and
- audit_store.method is null
+ audit_store.method is null)
select
- audit_store
+ audit_store_byid
+ from
+ org.alfresco.repo.audit.hibernate.AuditSourceImpl as audit_store_byid
+ where
+ audit_store_byid =
+ (select max(audit_store.id)
from
org.alfresco.repo.audit.hibernate.AuditSourceImpl as audit_store
where
audit_store.application = :application and
audit_store.service = :service and
- audit_store.method = :method
+ audit_store.method = :method)
diff --git a/source/java/org/alfresco/repo/audit/model/AbstractAuditEntry.java b/source/java/org/alfresco/repo/audit/model/AbstractAuditEntry.java
index c99fd0bb7c..d747a482d3 100644
--- a/source/java/org/alfresco/repo/audit/model/AbstractAuditEntry.java
+++ b/source/java/org/alfresco/repo/audit/model/AbstractAuditEntry.java
@@ -136,6 +136,49 @@ public abstract class AbstractAuditEntry
{
return recordOptions;
}
+
+
+ protected TrueFalseUnset getEffectiveAuditInternal()
+ {
+ TrueFalseUnset auditInternal;
+ if (checkEnabled() == TrueFalseUnset.TRUE)
+ {
+ auditInternal = getAuditInternalOrParentAuditInternal();
+ }
+ else
+ {
+ auditInternal = TrueFalseUnset.FALSE;
+ }
+ if(s_logger.isDebugEnabled())
+ {
+ s_logger.debug("... Effective audit internal is = "+auditInternal);
+ }
+ return auditInternal;
+ }
+
+ private TrueFalseUnset getAuditInternalOrParentAuditInternal()
+ {
+ TrueFalseUnset auditInternal = getAuditInternal();
+ if(s_logger.isDebugEnabled())
+ {
+ s_logger.debug("... ... audit internal is = "+auditInternal);
+ }
+ if (auditInternal == TrueFalseUnset.UNSET)
+ {
+ if (getParent() == null)
+ {
+ return TrueFalseUnset.UNSET;
+ }
+ else
+ {
+ return getParent().getAuditInternalOrParentAuditInternal();
+ }
+ }
+ else
+ {
+ return auditInternal;
+ }
+ }
protected AuditMode getEffectiveAuditMode()
{
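
A self-contained sketch of the UNSET-falls-back-to-parent resolution implemented in getAuditInternalOrParentAuditInternal() above; the enum and entry type here are stand-ins for the patch's classes.

    public class EffectiveFlagExample
    {
        enum TrueFalseUnset { TRUE, FALSE, UNSET }

        static class Entry
        {
            final Entry parent;
            final TrueFalseUnset auditInternal;

            Entry(Entry parent, TrueFalseUnset auditInternal)
            {
                this.parent = parent;
                this.auditInternal = auditInternal;
            }

            TrueFalseUnset effectiveAuditInternal()
            {
                if (auditInternal != TrueFalseUnset.UNSET)
                {
                    return auditInternal;
                }
                // UNSET defers to the parent; the root simply stays UNSET
                return parent == null ? TrueFalseUnset.UNSET : parent.effectiveAuditInternal();
            }
        }

        public static void main(String[] args)
        {
            Entry root = new Entry(null, TrueFalseUnset.TRUE);
            Entry method = new Entry(root, TrueFalseUnset.UNSET);
            System.out.println(method.effectiveAuditInternal());    // TRUE, inherited from root
        }
    }
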
diff --git a/source/java/org/alfresco/repo/audit/model/AuditEntry.java b/source/java/org/alfresco/repo/audit/model/AuditEntry.java
index 68678d200c..613da6578d 100644
--- a/source/java/org/alfresco/repo/audit/model/AuditEntry.java
+++ b/source/java/org/alfresco/repo/audit/model/AuditEntry.java
@@ -217,4 +217,23 @@ public class AuditEntry extends AbstractAuditEntry implements InitializingBean,
throw new UnsupportedOperationException();
}
+ public TrueFalseUnset getAuditInternalServiceMethods( MethodInvocation mi)
+ {
+ String serviceName = getPublicServiceIdentifier().getPublicServiceName(mi);
+ ServiceAuditEntry service = services.get(serviceName);
+ if(service != null)
+ {
+ return service.getAuditInternalServiceMethods( mi);
+ }
+ else
+ {
+ if(s_logger.isDebugEnabled())
+ {
+ s_logger.debug("No specific audit entry for service "+serviceName);
+ }
+ return getEffectiveAuditInternal();
+
+ }
+ }
+
}
diff --git a/source/java/org/alfresco/repo/audit/model/MethodAuditEntry.java b/source/java/org/alfresco/repo/audit/model/MethodAuditEntry.java
index a59d6b7a72..c38d36e5db 100644
--- a/source/java/org/alfresco/repo/audit/model/MethodAuditEntry.java
+++ b/source/java/org/alfresco/repo/audit/model/MethodAuditEntry.java
@@ -56,4 +56,13 @@ public class MethodAuditEntry extends AbstractNamedAuditEntry implements MethodA
throw new UnsupportedOperationException();
}
+ public TrueFalseUnset getAuditInternalServiceMethods(MethodInvocation mi)
+ {
+ if(s_logger.isDebugEnabled())
+ {
+ s_logger.debug("Evaluating if method is internally audited ..."+((ServiceAuditEntry)getParent()).getName()+"."+getName());
+ }
+ return getEffectiveAuditInternal();
+ }
+
}
diff --git a/source/java/org/alfresco/repo/audit/model/ServiceAuditEntry.java b/source/java/org/alfresco/repo/audit/model/ServiceAuditEntry.java
index 764f004044..e4458bb5ff 100644
--- a/source/java/org/alfresco/repo/audit/model/ServiceAuditEntry.java
+++ b/source/java/org/alfresco/repo/audit/model/ServiceAuditEntry.java
@@ -97,4 +97,22 @@ public class ServiceAuditEntry extends AbstractNamedAuditEntry implements Method
throw new UnsupportedOperationException();
}
+ public TrueFalseUnset getAuditInternalServiceMethods(MethodInvocation mi)
+ {
+ String methodName = mi.getMethod().getName();
+ MethodAuditEntry method = methods.get(methodName);
+ if (method != null)
+ {
+ return method.getAuditInternalServiceMethods(mi);
+ }
+ else
+ {
+ if(s_logger.isDebugEnabled())
+ {
+ s_logger.debug("Evaluating if service is internally audited (no specific setting) for "+getName()+"."+methodName);
+ }
+ return getEffectiveAuditInternal();
+ }
+ }
+
}
diff --git a/source/java/org/alfresco/repo/cache/InternalEhCacheManagerFactoryBean.java b/source/java/org/alfresco/repo/cache/InternalEhCacheManagerFactoryBean.java
index 3830803457..cf87c94d1f 100644
--- a/source/java/org/alfresco/repo/cache/InternalEhCacheManagerFactoryBean.java
+++ b/source/java/org/alfresco/repo/cache/InternalEhCacheManagerFactoryBean.java
@@ -44,7 +44,7 @@ import org.springframework.util.ResourceUtils;
* For Alfresco purposes, there are two files that are looked for:
*
* - classpath:alfresco/extension/ehcache-custom.xml, which will take precedence
- * - classpath:alfresco/ehcache.xml, which is the default shipped with Alfresco
+ * - classpath:alfresco/ehcache-default.xml, which is the default shipped with Alfresco
*
*
* The EHCache static singleton instance is used but ensuring that all access to the
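
A minimal sketch of the lookup order the javadoc above describes: the extension override on the classpath wins, otherwise the default configuration shipped with Alfresco is used. Only the two resource names come from the comment; the surrounding factory plumbing is elided.

    import java.net.URL;

    public class CacheConfigLookupExample
    {
        public static URL findConfig()
        {
            ClassLoader cl = Thread.currentThread().getContextClassLoader();
            // the custom file, if present, takes precedence ...
            URL url = cl.getResource("alfresco/extension/ehcache-custom.xml");
            if (url == null)
            {
                // ... otherwise fall back to the default shipped with Alfresco
                url = cl.getResource("alfresco/ehcache-default.xml");
            }
            return url;
        }
    }
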
diff --git a/source/java/org/alfresco/repo/content/AbstractContentStore.java b/source/java/org/alfresco/repo/content/AbstractContentStore.java
index 76ff24e12b..db7336a57c 100644
--- a/source/java/org/alfresco/repo/content/AbstractContentStore.java
+++ b/source/java/org/alfresco/repo/content/AbstractContentStore.java
@@ -109,7 +109,7 @@ public abstract class AbstractContentStore implements ContentStore
// extract the relative part of the URL
String path = contentUrl.substring(index);
// more extensive checks can be added in, but it seems overkill
- if (path.length() < 10)
+ if (path.length() < 8)
{
throw new AlfrescoRuntimeException(
"The content URL is invalid: \n" +
diff --git a/source/java/org/alfresco/repo/content/MimetypeMap.java b/source/java/org/alfresco/repo/content/MimetypeMap.java
index 272c1cb89d..473dea5253 100644
--- a/source/java/org/alfresco/repo/content/MimetypeMap.java
+++ b/source/java/org/alfresco/repo/content/MimetypeMap.java
@@ -56,6 +56,7 @@ public class MimetypeMap implements MimetypeService
public static final String MIMETYPE_IMAGE_GIF = "image/gif";
public static final String MIMETYPE_IMAGE_JPEG = "image/jpeg";
public static final String MIMETYPE_IMAGE_RGB = "image/x-rgb";
+ public static final String MIMETYPE_IMAGE_SVG = "image/svg";
public static final String MIMETYPE_JAVASCRIPT = "application/x-javascript";
public static final String MIMETYPE_ZIP = "application/zip";
// Open Document
diff --git a/source/java/org/alfresco/repo/content/RoutingContentService.java b/source/java/org/alfresco/repo/content/RoutingContentService.java
index 61f9eefd2c..3821025eca 100644
--- a/source/java/org/alfresco/repo/content/RoutingContentService.java
+++ b/source/java/org/alfresco/repo/content/RoutingContentService.java
@@ -323,7 +323,6 @@ public class RoutingContentService implements ContentService
public ContentWriter getWriter(NodeRef nodeRef, QName propertyQName, boolean update)
{
-
// check for an existing URL - the get of the reader will perform type checking
ContentReader existingContentReader = getReader(nodeRef, propertyQName, false);
diff --git a/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java b/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java
index 91ec5e9e57..33ebf84479 100644
--- a/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java
+++ b/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java
@@ -1,262 +1,249 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.content.cleanup;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.repo.avm.AVMNodeDAO;
-import org.alfresco.repo.content.ContentStore;
-import org.alfresco.repo.node.db.NodeDaoService;
-import org.alfresco.repo.transaction.TransactionUtil;
-import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
-import org.alfresco.service.cmr.dictionary.DictionaryService;
-import org.alfresco.service.cmr.repository.ContentData;
-import org.alfresco.service.cmr.repository.ContentReader;
-import org.alfresco.service.transaction.TransactionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * This component is responsible for finding orphaned content in a given
- * content store or stores. Deletion handlers can be provided to ensure
- * that the content is moved to another location prior to being removed
- * from the store(s) being cleaned.
- *
- * @author Derek Hulley
- */
-public class ContentStoreCleaner
-{
- private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
-
- private DictionaryService dictionaryService;
- private NodeDaoService nodeDaoService;
- private TransactionService transactionService;
- private AVMNodeDAO avmNodeDAO;
- private List<ContentStore> stores;
- private List<ContentStoreCleanerListener> listeners;
- private int protectDays;
-
- public ContentStoreCleaner()
- {
- this.stores = new ArrayList<ContentStore>(0);
- this.listeners = new ArrayList<ContentStoreCleanerListener>(0);
- this.protectDays = 7;
- }
-
- /**
- * @param dictionaryService used to determine which properties are content properties
- */
- public void setDictionaryService(DictionaryService dictionaryService)
- {
- this.dictionaryService = dictionaryService;
- }
-
- /**
- * @param nodeDaoService used to get the property values
- */
- public void setNodeDaoService(NodeDaoService nodeDaoService)
- {
- this.nodeDaoService = nodeDaoService;
- }
-
- /**
- * Setter for Spring.
- * @param avmNodeDAO The AVM Node DAO to get urls with.
- */
- public void setAvmNodeDAO(AVMNodeDAO avmNodeDAO)
- {
- this.avmNodeDAO = avmNodeDAO;
- }
-
- /**
- * @param transactionService the component to ensure proper transactional wrapping
- */
- public void setTransactionService(TransactionService transactionService)
- {
- this.transactionService = transactionService;
- }
-
- /**
- * @param stores the content stores to clean
- */
- public void setStores(List<ContentStore> stores)
- {
- this.stores = stores;
- }
-
- /**
- * @param listeners the listeners that can react to deletions
- */
- public void setListeners(List<ContentStoreCleanerListener> listeners)
- {
- this.listeners = listeners;
- }
-
- /**
- * Set the minimum number of days old that orphaned content must be
- * before deletion is possible. The default is 7 days.
- *
- * @param protectDays minimum age (in days) of deleted content
- */
- public void setProtectDays(int protectDays)
- {
- this.protectDays = protectDays;
- }
-
- /**
- * Perform basic checks to ensure that the necessary dependencies were injected.
- */
- private void checkProperties()
- {
- if (dictionaryService == null)
- {
- throw new AlfrescoRuntimeException("Property 'dictionaryService' not set");
- }
- if (nodeDaoService == null)
- {
- throw new AlfrescoRuntimeException("Property 'nodeDaoService' not set");
- }
- if (transactionService == null)
- {
- throw new AlfrescoRuntimeException("Property 'transactionService' not set");
- }
- if (stores == null || stores.size() == 0)
- {
- throw new AlfrescoRuntimeException("Property 'stores' not set");
- }
- if (listeners == null)
- {
- throw new AlfrescoRuntimeException("Property 'listeners' not set");
- }
-
- // check the protect days
- if (protectDays < 0)
- {
- throw new AlfrescoRuntimeException("Property 'protectDays' must be 0 or greater (0 is not recommended)");
- }
- else if (protectDays == 0)
- {
- logger.warn(
- "Property 'protectDays' is set to 0. " +
- "It is possible that in-transaction content will be deleted.");
- }
- }
-
- private Set<String> getValidUrls()
- {
- // This does the work for the regular Alfresco repository.
- // wrap to make the request in a transaction
- TransactionWork<List<String>> getUrlsWork = new TransactionWork<List<String>>()
- {
- public List<String> doWork() throws Exception
- {
- return nodeDaoService.getContentDataStrings();
- };
- };
- // execute in READ-ONLY txn
- List<String> contentDataStrings = TransactionUtil.executeInUserTransaction(
- transactionService,
- getUrlsWork,
- true);
-
- // Do the same for the AVM repository.
- TransactionWork<List<String>> getAVMUrlsWork = new TransactionWork<List<String>>()
- {
- public List<String> doWork() throws Exception
- {
- return avmNodeDAO.getContentUrls();
- }
- };
-
- List<String> avmContentUrls = TransactionUtil.executeInUserTransaction(
- transactionService,
- getAVMUrlsWork,
- true);
-
- // get all valid URLs
- Set<String> validUrls = new HashSet<String>(contentDataStrings.size());
- // convert the strings to objects and extract the URL
- for (String contentDataString : contentDataStrings)
- {
- ContentData contentData = ContentData.createContentProperty(contentDataString);
- if (contentData.getContentUrl() != null)
- {
- // a URL was present
- validUrls.add(contentData.getContentUrl());
- }
- }
- // put all the avm urls into validUrls.
- for (String url : avmContentUrls)
- {
- validUrls.add(url);
- }
-
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug("Found " + validUrls.size() + " valid URLs in metadata");
- }
- return validUrls;
- }
-
- public void execute()
- {
- checkProperties();
- Set<String> validUrls = getValidUrls();
- // now clean each store in turn
- for (ContentStore store : stores)
- {
- clean(validUrls, store);
- }
- }
-
- private void clean(Set<String> validUrls, ContentStore store)
- {
- Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
- // get the store's URLs
- Set<String> storeUrls = store.getUrls(null, checkAllBeforeDate);
- // remove all URLs that occur in the validUrls
- storeUrls.removeAll(validUrls);
- // now clean the store
- for (String url : storeUrls)
- {
- ContentReader sourceReader = store.getReader(url);
- // announce this to the listeners
- for (ContentStoreCleanerListener listener : listeners)
- {
- // get a fresh reader
- ContentReader listenerReader = sourceReader.getReader();
- // call it
- listener.beforeDelete(listenerReader);
- }
- // delete it
- store.delete(url);
-
- if (logger.isDebugEnabled())
- {
- logger.debug("Removed URL from store: \n" +
- " Store: " + store + "\n" +
- " URL: " + url);
- }
- }
- }
-}
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.content.cleanup;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.repo.avm.AVMNodeDAO;
+import org.alfresco.repo.content.ContentStore;
+import org.alfresco.repo.node.db.NodeDaoService;
+import org.alfresco.repo.transaction.TransactionUtil;
+import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
+import org.alfresco.service.cmr.repository.ContentData;
+import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.transaction.TransactionService;
+import org.alfresco.util.PropertyCheck;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * This component is responsible for finding orphaned content in a given
+ * content store or stores. Deletion handlers can be provided to ensure
+ * that the content is moved to another location prior to being removed
+ * from the store(s) being cleaned.
+ *
+ * @author Derek Hulley
+ */
+public class ContentStoreCleaner
+{
+ private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
+
+ private DictionaryService dictionaryService;
+ private NodeDaoService nodeDaoService;
+ private TransactionService transactionService;
+ private AVMNodeDAO avmNodeDAO;
+ private List<ContentStore> stores;
+ private List<ContentStoreCleanerListener> listeners;
+ private int protectDays;
+
+ public ContentStoreCleaner()
+ {
+ this.stores = new ArrayList<ContentStore>(0);
+ this.listeners = new ArrayList<ContentStoreCleanerListener>(0);
+ this.protectDays = 7;
+ }
+
+ /**
+ * @param dictionaryService used to determine which properties are content properties
+ */
+ public void setDictionaryService(DictionaryService dictionaryService)
+ {
+ this.dictionaryService = dictionaryService;
+ }
+
+ /**
+ * @param nodeDaoService used to get the property values
+ */
+ public void setNodeDaoService(NodeDaoService nodeDaoService)
+ {
+ this.nodeDaoService = nodeDaoService;
+ }
+
+ /**
+ * Setter for Spring.
+ * @param avmNodeDAO The AVM Node DAO to get urls with.
+ */
+ public void setAvmNodeDAO(AVMNodeDAO avmNodeDAO)
+ {
+ this.avmNodeDAO = avmNodeDAO;
+ }
+
+ /**
+ * @param transactionService the component to ensure proper transactional wrapping
+ */
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
+
+ /**
+ * @param stores the content stores to clean
+ */
+ public void setStores(List<ContentStore> stores)
+ {
+ this.stores = stores;
+ }
+
+ /**
+ * @param listeners the listeners that can react to deletions
+ */
+ public void setListeners(List<ContentStoreCleanerListener> listeners)
+ {
+ this.listeners = listeners;
+ }
+
+ /**
+ * Set the minimum number of days old that orphaned content must be
+ * before deletion is possible. The default is 7 days.
+ *
+ * @param protectDays minimum age (in days) of deleted content
+ */
+ public void setProtectDays(int protectDays)
+ {
+ this.protectDays = protectDays;
+ }
+
+ /**
+ * Perform basic checks to ensure that the necessary dependencies were injected.
+ */
+ private void checkProperties()
+ {
+ PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
+ PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
+ PropertyCheck.mandatory(this, "transactionService", transactionService);
+ PropertyCheck.mandatory(this, "listeners", listeners);
+
+ // check the protect days
+ if (protectDays < 0)
+ {
+ throw new AlfrescoRuntimeException("Property 'protectDays' must be 0 or greater (0 is not recommended)");
+ }
+ else if (protectDays == 0)
+ {
+ logger.warn(
+ "Property 'protectDays' is set to 0. " +
+ "It is possible that in-transaction content will be deleted.");
+ }
+ }
+
+ private Set<String> getValidUrls()
+ {
+ final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
+ // wrap to make the request in a transaction
+ TransactionWork<List<Serializable>> getUrlsWork = new TransactionWork<List<Serializable>>()
+ {
+ public List<Serializable> doWork() throws Exception
+ {
+ return nodeDaoService.getPropertyValuesByActualType(contentDataType);
+ };
+ };
+ // execute in READ-ONLY txn
+ List<Serializable> values = TransactionUtil.executeInUserTransaction(
+ transactionService,
+ getUrlsWork,
+ true);
+
+ // Do the same for the AVM repository.
+ TransactionWork<List<String>> getAVMUrlsWork = new TransactionWork<List<String>>()
+ {
+ public List<String> doWork() throws Exception
+ {
+ return avmNodeDAO.getContentUrls();
+ }
+ };
+
+ List<String> avmContentUrls = TransactionUtil.executeInUserTransaction(
+ transactionService,
+ getAVMUrlsWork,
+ true);
+
+ // get all valid URLs
+ Set<String> validUrls = new HashSet<String>(values.size());
+ // convert the strings to objects and extract the URL
+ for (Serializable value : values)
+ {
+ ContentData contentData = (ContentData) value;
+ if (contentData.getContentUrl() != null)
+ {
+ // a URL was present
+ validUrls.add(contentData.getContentUrl());
+ }
+ }
+ // put all the avm urls into validUrls.
+ for (String url : avmContentUrls)
+ {
+ validUrls.add(url);
+ }
+
+ // done
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Found " + validUrls.size() + " valid URLs in metadata");
+ }
+ return validUrls;
+ }
+
+ public void execute()
+ {
+ checkProperties();
+ Set<String> validUrls = getValidUrls();
+ // now clean each store in turn
+ for (ContentStore store : stores)
+ {
+ clean(validUrls, store);
+ }
+ }
+
+ private void clean(Set<String> validUrls, ContentStore store)
+ {
+ Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
+ // get the store's URLs
+ Set<String> storeUrls = store.getUrls(null, checkAllBeforeDate);
+ // remove all URLs that occur in the validUrls
+ storeUrls.removeAll(validUrls);
+ // now clean the store
+ for (String url : storeUrls)
+ {
+ ContentReader sourceReader = store.getReader(url);
+ // announce this to the listeners
+ for (ContentStoreCleanerListener listener : listeners)
+ {
+ // get a fresh reader
+ ContentReader listenerReader = sourceReader.getReader();
+ // call it
+ listener.beforeDelete(listenerReader);
+ }
+ // delete it
+ store.delete(url);
+
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Removed URL from store: \n" +
+ " Store: " + store + "\n" +
+ " URL: " + url);
+ }
+ }
+ }
+}
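
A compact sketch of the cleanup algorithm the class above implements: gather the content URLs still referenced by metadata, take the store's URLs older than the protection cutoff, subtract the referenced set, and delete what remains. The Store interface here is a stand-in for ContentStore.

    import java.util.Date;
    import java.util.HashSet;
    import java.util.Set;

    public class OrphanCleanupExample
    {
        interface Store
        {
            Set<String> getUrls(Date createdAfter, Date createdBefore);
            void delete(String url);
        }

        static void clean(Store store, Set<String> validUrls, int protectDays)
        {
            // anything newer than the cutoff is protected, even if it looks orphaned
            Date cutoff = new Date(System.currentTimeMillis() - (long) protectDays * 24L * 3600L * 1000L);

            Set<String> orphans = new HashSet<String>(store.getUrls(null, cutoff));
            orphans.removeAll(validUrls);           // keep only URLs with no metadata reference

            for (String url : orphans)
            {
                store.delete(url);
            }
        }
    }
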
diff --git a/source/java/org/alfresco/repo/content/transform/AbstractContentTransformerTest.java b/source/java/org/alfresco/repo/content/transform/AbstractContentTransformerTest.java
index 468ba3e5f0..42ba8f27d3 100644
--- a/source/java/org/alfresco/repo/content/transform/AbstractContentTransformerTest.java
+++ b/source/java/org/alfresco/repo/content/transform/AbstractContentTransformerTest.java
@@ -19,7 +19,10 @@ package org.alfresco.repo.content.transform;
import java.io.File;
import java.io.IOException;
import java.net.URL;
-import java.util.List;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.TreeSet;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentReader;
@@ -120,45 +123,71 @@ public abstract class AbstractContentTransformerTest extends BaseSpringTest
* case where optimizations are being done around the selection of the most
* appropriate transformer, different transformers could be used during the iteration
* process.
+ *
+ * Results for the transformations are dumped to a temporary file named
+ * AbstractContentTransformerTest-results-1234.txt.
*/
public void testAllConversions() throws Exception
{
+ StringBuilder sb = new StringBuilder(2048);
+ sb.append("Mimetype Conversion Tests \n")
+ .append("========================= \n")
+ .append(" Date: ").append(new Date()).append("\n")
+ .append("\n");
+
// get all mimetypes
- List<String> mimetypes = mimetypeMap.getMimetypes();
+ Set<String> mimetypes = new TreeSet<String>(mimetypeMap.getMimetypes());
for (String sourceMimetype : mimetypes)
{
// attempt to get a source file for each mimetype
String sourceExtension = mimetypeMap.getExtension(sourceMimetype);
- File sourceFile = AbstractContentTransformerTest.loadQuickTestFile(sourceExtension);
- if (sourceFile == null)
- {
- continue; // no test file available for that extension
- }
+
+ sb.append(" Source Extension: ").append(sourceExtension).append("\n");
// attempt to convert to every other mimetype
for (String targetMimetype : mimetypes)
{
ContentWriter targetWriter = null;
// construct a reader onto the source file
+ String targetExtension = mimetypeMap.getExtension(targetMimetype);
+
+ // must we test the transformation?
+ ContentTransformer transformer = getTransformer(sourceMimetype, targetMimetype);
+ if (transformer == null || transformer.getReliability(sourceMimetype, targetMimetype) <= 0.0)
+ {
+ // no transformer
+ continue;
+ }
+
+ // dump
+ sb.append(" Target Extension: ").append(targetExtension);
+ sb.append(" <").append(transformer.getClass().getSimpleName()).append(">");
+
+ // is there a test file for this conversion?
+ File sourceFile = AbstractContentTransformerTest.loadQuickTestFile(sourceExtension);
+ if (sourceFile == null)
+ {
+ sb.append(" \n");
+ continue; // no test file available for that extension
+ }
ContentReader sourceReader = new FileContentReader(sourceFile);
// perform the transformation several times so that we get a good idea of performance
int count = 0;
+ long before = System.currentTimeMillis();
+ Set<String> transformerClasses = new HashSet<String>(2);
for (int i = 0; i < 5; i++)
{
- // must we test the transformation?
- ContentTransformer transformer = getTransformer(sourceMimetype, targetMimetype);
- if (transformer == null)
+ // get the transformer repeatedly as it might be different each time around
+ transformer = getTransformer(sourceMimetype, targetMimetype);
+ // must we report on this class?
+ if (!transformerClasses.contains(transformer.getClass().getName()))
{
- break; // test is not required
+ transformerClasses.add(transformer.getClass().getName());
+ sb.append(" <").append(transformer.getClass().getSimpleName()).append(">");
}
- else if (transformer.getReliability(sourceMimetype, targetMimetype) <= 0.0)
- {
- break; // not reliable for this transformation
- }
-
+
// make a writer for the target file
- String targetExtension = mimetypeMap.getExtension(targetMimetype);
File targetFile = TempFileProvider.createTempFile(
getClass().getSimpleName() + "_" + getName() + "_" + sourceExtension + "_",
"." + targetExtension);
@@ -198,6 +227,11 @@ public abstract class AbstractContentTransformerTest extends BaseSpringTest
// increment count
count++;
}
+ long after = System.currentTimeMillis();
+ double average = (double) (after - before) / (double) count;
+
+ // dump
+ sb.append(String.format(" average %10.0f ms", average)).append("\n");
if (logger.isDebugEnabled())
{
@@ -209,5 +243,11 @@ public abstract class AbstractContentTransformerTest extends BaseSpringTest
}
}
}
+
+ // dump to file
+ File outputFile = TempFileProvider.createTempFile("AbstractContentTransformerTest-results-", ".txt");
+ ContentWriter outputWriter = new FileContentWriter(outputFile);
+ outputWriter.setEncoding("UTF8");
+ outputWriter.putContent(sb.toString());
}
}
diff --git a/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
index 12d016db23..a70d31d106 100644
--- a/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
+++ b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
@@ -16,9 +16,16 @@
*/
package org.alfresco.repo.content.transform;
+import java.io.File;
+
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import org.alfresco.repo.content.MimetypeMap;
+import org.alfresco.repo.content.filestore.FileContentReader;
+import org.alfresco.repo.content.filestore.FileContentWriter;
+import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.cmr.repository.ContentWriter;
+import org.alfresco.util.TempFileProvider;
/**
* @see org.alfresco.repo.content.transform.OpenOfficeContentTransformer
@@ -75,4 +82,24 @@ public class OpenOfficeContentTransformerTest extends AbstractContentTransformer
reliability = transformer.getReliability(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_TEXT_PLAIN);
assertEquals("Mimetype should be supported", 1.0, reliability);
}
+
+ /**
+ * Test what is up with HTML to PDF
+ */
+ public void testHtmlToPdf() throws Exception
+ {
+ if (!transformer.isConnected())
+ {
+ // no connection
+ return;
+ }
+ File htmlSourceFile = loadQuickTestFile("html");
+ File pdfTargetFile = TempFileProvider.createTempFile(getName() + "-target-", ".pdf");
+ ContentReader reader = new FileContentReader(htmlSourceFile);
+ reader.setMimetype(MimetypeMap.MIMETYPE_HTML);
+ ContentWriter writer = new FileContentWriter(pdfTargetFile);
+ writer.setMimetype(MimetypeMap.MIMETYPE_PDF);
+
+ transformer.transform(reader, writer);
+ }
}
diff --git a/source/java/org/alfresco/repo/content/transform/magick/AbstractImageMagickContentTransformer.java b/source/java/org/alfresco/repo/content/transform/magick/AbstractImageMagickContentTransformer.java
index 852f9194b0..bb11c7d662 100644
--- a/source/java/org/alfresco/repo/content/transform/magick/AbstractImageMagickContentTransformer.java
+++ b/source/java/org/alfresco/repo/content/transform/magick/AbstractImageMagickContentTransformer.java
@@ -145,6 +145,10 @@ public abstract class AbstractImageMagickContentTransformer extends AbstractCont
{
return false; // rgb extension doesn't work
}
+ else if (mimetype.equals(MimetypeMap.MIMETYPE_IMAGE_SVG))
+ {
+ return false; // svg extension doesn't work
+ }
else
{
return true;
diff --git a/source/java/org/alfresco/repo/dictionary/DictionaryComponent.java b/source/java/org/alfresco/repo/dictionary/DictionaryComponent.java
index 2b9722b49e..f9cd844d6c 100644
--- a/source/java/org/alfresco/repo/dictionary/DictionaryComponent.java
+++ b/source/java/org/alfresco/repo/dictionary/DictionaryComponent.java
@@ -81,7 +81,7 @@ public class DictionaryComponent implements DictionaryService
Collection propertyTypes = new ArrayList();
for (QName model : getAllModels())
{
- propertyTypes.addAll(getAspects(model));
+ propertyTypes.addAll(getDataTypes(model));
}
return propertyTypes;
}
diff --git a/source/java/org/alfresco/repo/dictionary/M2Model.java b/source/java/org/alfresco/repo/dictionary/M2Model.java
index 4dad74222a..c4c8752aa4 100644
--- a/source/java/org/alfresco/repo/dictionary/M2Model.java
+++ b/source/java/org/alfresco/repo/dictionary/M2Model.java
@@ -202,7 +202,7 @@ public class M2Model
M2Type type = getType(name);
if (type != null)
{
- types.remove(types);
+ types.remove(type);
}
}
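The one-character fix above is easy to miss: the old code called types.remove(types), asking the list to remove itself, which is a no-op unless the list happens to contain itself, so removeType never actually deleted anything. A self-contained illustration using only java.util (the demo class and type name are illustrative, not part of the patch):

import java.util.ArrayList;
import java.util.List;

public class RemoveSelfSketch
{
    public static void main(String[] args)
    {
        List<String> types = new ArrayList<String>();
        types.add("test:type");

        // The old call: the list does not contain itself, so nothing is removed.
        boolean removedOld = types.remove(types);   // false, size stays 1

        // The fixed call: remove the looked-up element.
        String type = types.get(0);
        boolean removedNew = types.remove(type);    // true, size drops to 0

        System.out.println(removedOld + " " + removedNew + " size=" + types.size());
    }
}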
diff --git a/source/java/org/alfresco/repo/dictionary/dictionarydaotest_model.xml b/source/java/org/alfresco/repo/dictionary/dictionarydaotest_model.xml
index fc8748f662..ba51937ef7 100644
--- a/source/java/org/alfresco/repo/dictionary/dictionarydaotest_model.xml
+++ b/source/java/org/alfresco/repo/dictionary/dictionarydaotest_model.xml
@@ -16,7 +16,7 @@
- org.apache.lucene.analysis.standard.StandardAnalyzer
+ org.alfresco.repo.search.impl.lucene.analysis.AlfrescoStandardAnalyser
java.lang.Object
diff --git a/source/java/org/alfresco/repo/domain/DbAccessControlList.java b/source/java/org/alfresco/repo/domain/DbAccessControlList.java
index da378dd6c0..327ed47347 100644
--- a/source/java/org/alfresco/repo/domain/DbAccessControlList.java
+++ b/source/java/org/alfresco/repo/domain/DbAccessControlList.java
@@ -1,83 +1,83 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.domain;
-
-import java.util.Set;
-
-import org.alfresco.repo.domain.hibernate.DbAccessControlEntryImpl;
-
-
-/**
- * The interface to support persistence of node access control entries in hibernate
- *
- * @author andyh
- */
-public interface DbAccessControlList
-{
- public long getId();
-
- /**
- *
- * @return Returns the access control entries for this access control list
- */
- public Set getEntries();
-
- /**
- * Get inheritance behaviour
- * @return Returns the inheritance status of this list
- */
- public boolean getInherits();
-
- /**
- * Set inheritance behaviour
- * @param inherits true to set the permissions to inherit
- */
- public void setInherits(boolean inherits);
-
- public int deleteEntriesForAuthority(String authorityKey);
-
- public int deleteEntriesForPermission(DbPermissionKey permissionKey);
-
- public int deleteEntry(String authorityKey, DbPermissionKey permissionKey);
-
- /**
- * Delete the entries related to this access control list
- *
- * @return Returns the number of entries deleted
- */
- public int deleteEntries();
-
- public DbAccessControlEntry getEntry(String authorityKey, DbPermissionKey permissionKey);
-
- /**
- * Factory method to create an entry and wire it up.
- * Note that the returned value may still be transient. Saving it should be fine, but
- * is not required.
- *
- * @param permission the mandatory permission association with this entry
- * @param authority the mandatory authority. Must not be transient.
- * @param allowed allowed or disallowed. Must not be transient.
- * @return Returns the new entry
- */
- public DbAccessControlEntryImpl newEntry(DbPermission permission, DbAuthority authority, boolean allowed);
-
- /**
- * Make a copy of this ACL (persistently)
- * @return The copy.
- */
- public DbAccessControlList getCopy();
-}
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.domain;
+
+import java.util.Set;
+
+import org.alfresco.repo.domain.hibernate.DbAccessControlEntryImpl;
+
+
+/**
+ * The interface to support persistence of node access control entries in hibernate
+ *
+ * @author andyh
+ */
+public interface DbAccessControlList
+{
+ public long getId();
+
+ /**
+ *
+ * @return Returns the access control entries for this access control list
+ */
+ public Set getEntries();
+
+ /**
+ * Get inheritance behaviour
+ * @return Returns the inheritance status of this list
+ */
+ public boolean getInherits();
+
+ /**
+ * Set inheritance behaviour
+ * @param inherits true to set the permissions to inherit
+ */
+ public void setInherits(boolean inherits);
+
+ public int deleteEntriesForAuthority(String authorityKey);
+
+ public int deleteEntriesForPermission(DbPermissionKey permissionKey);
+
+ public int deleteEntry(String authorityKey, DbPermissionKey permissionKey);
+
+ /**
+ * Delete the entries related to this access control list
+ *
+ * @return Returns the number of entries deleted
+ */
+ public int deleteEntries();
+
+ public DbAccessControlEntry getEntry(String authorityKey, DbPermissionKey permissionKey);
+
+ /**
+ * Factory method to create an entry and wire it up.
+ * Note that the returned value may still be transient. Saving it should be fine, but
+ * is not required.
+ *
+ * @param permission the mandatory permission association with this entry
+ * @param authority the mandatory authority. Must not be transient.
+ * @param allowed allowed or disallowed. Must not be transient.
+ * @return Returns the new entry
+ */
+ public DbAccessControlEntryImpl newEntry(DbPermission permission, DbAuthority authority, boolean allowed);
+
+ /**
+ * Make a copy of this ACL (persistently)
+ * @return The copy.
+ */
+ public DbAccessControlList getCopy();
+}
diff --git a/source/java/org/alfresco/repo/domain/PropertyValue.java b/source/java/org/alfresco/repo/domain/PropertyValue.java
index f4fad1fa2a..bd1803d20f 100644
--- a/source/java/org/alfresco/repo/domain/PropertyValue.java
+++ b/source/java/org/alfresco/repo/domain/PropertyValue.java
@@ -19,6 +19,7 @@ package org.alfresco.repo.domain;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@@ -438,19 +439,32 @@ public class PropertyValue implements Cloneable, Serializable
*
     * @return Returns the ValueType - never null
*/
- private ValueType makeValueType(QName typeQName)
+ private static ValueType makeValueType(QName typeQName)
{
ValueType valueType = valueTypesByPropertyType.get(typeQName);
if (valueType == null)
{
throw new AlfrescoRuntimeException(
"Property type not recognised: \n" +
- " type: " + typeQName + "\n" +
- " property: " + this);
+ " type: " + typeQName);
}
return valueType;
}
+ /**
+ * Given an actual type qualified name, returns the String that represents it in
+ * the database.
+ *
+ * @param typeQName the type qualified name
+ * @return Returns the String representation of the type,
+ * e.g. CONTENT for type d:content.
+ */
+ public static String getActualTypeString(QName typeQName)
+ {
+ ValueType valueType = makeValueType(typeQName);
+ return valueType.toString();
+ }
+
@Override
public boolean equals(Object obj)
{
@@ -632,15 +646,16 @@ public class PropertyValue implements Cloneable, Serializable
* @return Returns the value of this property as the desired type, or a Collection
* of values of the required type
*
- * @throws java.lang.UnsupportedOperationException if the value cannot be converted to the
- * type given
+ * @throws AlfrescoRuntimeException
+ * if the type given is not recognized
+ * @throws org.alfresco.service.cmr.repository.datatype.TypeConversionException
+ * if the conversion to the required type fails
*
* @see DataTypeDefinition#ANY The static qualified names for the types
*/
public Serializable getValue(QName typeQName)
{
// first check for null
-
ValueType requiredType = makeValueType(typeQName);
if (requiredType == ValueType.SERIALIZABLE)
{
@@ -680,6 +695,24 @@ public class PropertyValue implements Cloneable, Serializable
return ret;
}
+ /**
+ * Gets the value or values as a guaranteed collection.
+ *
+ * @see #getValue(QName)
+ */
+ public Collection getCollection(QName typeQName)
+ {
+ Serializable value = getValue(typeQName);
+ if (value instanceof Collection)
+ {
+ return (Collection) value;
+ }
+ else
+ {
+ return Collections.singletonList(value);
+ }
+ }
+
public boolean getBooleanValue()
{
if (booleanValue == null)
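The new getCollection(QName) helper above guarantees a Collection result: a value that is already a Collection comes back as-is, and anything else is wrapped in a singleton list, so callers no longer need an instanceof check after getValue(QName). A hedged usage sketch; the PropertyValue constructor call mirrors the tests elsewhere in this patch, while the demo class itself is illustrative:

import java.util.Collection;

import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;

public class PropertyValueCollectionSketch
{
    public static void main(String[] args)
    {
        // single-valued text property, constructed as in HibernateNodeTest
        PropertyValue value = new PropertyValue(DataTypeDefinition.TEXT, "AAA");

        // getValue(..) would hand back the bare String; getCollection(..)
        // always returns a Collection, wrapping single values in a singleton list
        Collection values = value.getCollection(DataTypeDefinition.TEXT);

        // the String stored for the actual type, e.g. CONTENT for d:content
        String actualType = PropertyValue.getActualTypeString(DataTypeDefinition.TEXT);

        System.out.println(actualType + ": " + values.size() + " value(s)");
    }
}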
diff --git a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java b/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java
index 8df8f271a2..4a830a42f1 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java
@@ -133,29 +133,6 @@ public class ChildAssocImpl implements ChildAssoc, Serializable
return sb.toString();
}
- public boolean equals(Object obj)
- {
- if (obj == null)
- {
- return false;
- }
- else if (obj == this)
- {
- return true;
- }
- else if (!(obj instanceof ChildAssoc))
- {
- return false;
- }
- ChildAssoc that = (ChildAssoc) obj;
- return EqualsHelper.nullSafeEquals(id, that.getId());
- }
-
- public int hashCode()
- {
- return (id == null ? 0 : id.hashCode());
- }
-
/**
* Orders the child associations by ID. A smaller ID has a higher priority.
* This may change once we introduce a changeable index against which to order.
diff --git a/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java b/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java
index 6576a88117..7139d6a8bc 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java
@@ -1,250 +1,250 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.domain.hibernate;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.alfresco.repo.domain.DbAccessControlEntry;
-import org.alfresco.repo.domain.DbAccessControlList;
-import org.alfresco.repo.domain.DbAuthority;
-import org.alfresco.repo.domain.DbPermission;
-import org.alfresco.repo.domain.DbPermissionKey;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.hibernate.Session;
-
-/**
- * The hibernate persisted class for node permission entries.
- *
- * @author andyh
- */
-public class DbAccessControlListImpl extends LifecycleAdapter
- implements DbAccessControlList, Serializable
-{
- private static final long serialVersionUID = 3123277428227075648L;
-
- private static Log logger = LogFactory.getLog(DbAccessControlListImpl.class);
-
- private long id;
- private Set entries;
- private boolean inherits;
-
- public DbAccessControlListImpl()
- {
- entries = new HashSet(5);
- }
-
- @Override
- public String toString()
- {
- StringBuilder sb = new StringBuilder(128);
- sb.append("DbAccessControlListImpl")
- .append("[ id=").append(id)
- .append(", entries=").append(entries.size())
- .append(", inherits=").append(inherits)
- .append("]");
- return sb.toString();
- }
-
- @Override
- public boolean equals(Object o)
- {
- if (this == o)
- {
- return true;
- }
- if (!(o instanceof DbAccessControlList))
- {
- return false;
- }
- DbAccessControlList other = (DbAccessControlList) o;
-
- return (this.inherits == other.getInherits());
- }
-
- @Override
- public int hashCode()
- {
- return (inherits == false ? 0 : 17);
- }
-
- public long getId()
- {
- return id;
- }
-
- /**
- * Hibernate use
- */
- @SuppressWarnings("unused")
- private void setId(long id)
- {
- this.id = id;
- }
-
- public Set getEntries()
- {
- return entries;
- }
-
- /**
- * For Hibernate use
- */
- @SuppressWarnings("unused")
- private void setEntries(Set entries)
- {
- this.entries = entries;
- }
-
- public boolean getInherits()
- {
- return inherits;
- }
-
- public void setInherits(boolean inherits)
- {
- this.inherits = inherits;
- }
-
- /**
- * @see #deleteEntry(String, DbPermissionKey)
- */
- public int deleteEntriesForAuthority(String authority)
- {
- return deleteEntry(authority, null);
- }
-
- /**
- * @see #deleteEntry(String, DbPermissionKey)
- */
- public int deleteEntriesForPermission(DbPermissionKey permissionKey)
- {
- return deleteEntry(null, permissionKey);
- }
-
- public int deleteEntry(String authority, DbPermissionKey permissionKey)
- {
- List toDelete = new ArrayList(2);
- for (DbAccessControlEntry entry : entries)
- {
- if (authority != null && !authority.equals(entry.getAuthority().getRecipient()))
- {
- // authority is not a match
- continue;
- }
- else if (permissionKey != null && !permissionKey.equals(entry.getPermission().getKey()))
- {
- // permission is not a match
- continue;
- }
- toDelete.add(entry);
- }
- // delete them
- for (DbAccessControlEntry entry : toDelete)
- {
- // remove from the entry list
- entry.delete();
- }
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug("Deleted " + toDelete.size() + " access entries: \n" +
- " access control list: " + id + "\n" +
- " authority: " + authority + "\n" +
- " permission: " + permissionKey);
- }
- return toDelete.size();
- }
-
- public int deleteEntries()
- {
- /*
- * We don't do the full delete-remove-from-set thing here. Just delete each child entity
- * and then clear the entry set.
- */
-
- Session session = getSession();
- List toDelete = new ArrayList(entries);
- // delete each entry
- for (DbAccessControlEntry entry : toDelete)
- {
- session.delete(entry);
- }
- // clear the list
- int count = entries.size();
- entries.clear();
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug("Deleted " + count + " access entries for access control list " + this.id);
- }
- return count;
- }
-
- public DbAccessControlEntry getEntry(String authority, DbPermissionKey permissionKey)
- {
- for (DbAccessControlEntry entry : entries)
- {
- DbAuthority authorityEntity = entry.getAuthority();
- DbPermission permissionEntity = entry.getPermission();
- // check for a match
- if (authorityEntity.getRecipient().equals(authority)
- && permissionEntity.getKey().equals(permissionKey))
- {
- // found it
- return entry;
- }
- }
- return null;
- }
-
- public DbAccessControlEntryImpl newEntry(DbPermission permission, DbAuthority authority, boolean allowed)
- {
- DbAccessControlEntryImpl accessControlEntry = new DbAccessControlEntryImpl();
- // fill
- accessControlEntry.setAccessControlList(this);
- accessControlEntry.setPermission(permission);
- accessControlEntry.setAuthority(authority);
- accessControlEntry.setAllowed(allowed);
- // save it
- getSession().save(accessControlEntry);
- // maintain inverse set on the acl
- getEntries().add(accessControlEntry);
- // done
- return accessControlEntry;
- }
-
- /**
- * Make a copy of this ACL.
- * @return The copy.
- */
- public DbAccessControlList getCopy()
- {
- DbAccessControlList newAcl =
- new DbAccessControlListImpl();
- getSession().save(newAcl);
- for (DbAccessControlEntry entry : entries)
- {
- newAcl.newEntry(entry.getPermission(), entry.getAuthority(), entry.isAllowed());
- }
- return newAcl;
- }
-}
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.domain.hibernate;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.alfresco.repo.domain.DbAccessControlEntry;
+import org.alfresco.repo.domain.DbAccessControlList;
+import org.alfresco.repo.domain.DbAuthority;
+import org.alfresco.repo.domain.DbPermission;
+import org.alfresco.repo.domain.DbPermissionKey;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.Session;
+
+/**
+ * The hibernate persisted class for node permission entries.
+ *
+ * @author andyh
+ */
+public class DbAccessControlListImpl extends LifecycleAdapter
+ implements DbAccessControlList, Serializable
+{
+ private static final long serialVersionUID = 3123277428227075648L;
+
+ private static Log logger = LogFactory.getLog(DbAccessControlListImpl.class);
+
+ private long id;
+ private Set entries;
+ private boolean inherits;
+
+ public DbAccessControlListImpl()
+ {
+ entries = new HashSet(5);
+ }
+
+ @Override
+ public String toString()
+ {
+ StringBuilder sb = new StringBuilder(128);
+ sb.append("DbAccessControlListImpl")
+ .append("[ id=").append(id)
+ .append(", entries=").append(entries.size())
+ .append(", inherits=").append(inherits)
+ .append("]");
+ return sb.toString();
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (this == o)
+ {
+ return true;
+ }
+ if (!(o instanceof DbAccessControlList))
+ {
+ return false;
+ }
+ DbAccessControlList other = (DbAccessControlList) o;
+
+ return (this.inherits == other.getInherits());
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return (inherits == false ? 0 : 17);
+ }
+
+ public long getId()
+ {
+ return id;
+ }
+
+ /**
+ * Hibernate use
+ */
+ @SuppressWarnings("unused")
+ private void setId(long id)
+ {
+ this.id = id;
+ }
+
+ public Set getEntries()
+ {
+ return entries;
+ }
+
+ /**
+ * For Hibernate use
+ */
+ @SuppressWarnings("unused")
+ private void setEntries(Set entries)
+ {
+ this.entries = entries;
+ }
+
+ public boolean getInherits()
+ {
+ return inherits;
+ }
+
+ public void setInherits(boolean inherits)
+ {
+ this.inherits = inherits;
+ }
+
+ /**
+ * @see #deleteEntry(String, DbPermissionKey)
+ */
+ public int deleteEntriesForAuthority(String authority)
+ {
+ return deleteEntry(authority, null);
+ }
+
+ /**
+ * @see #deleteEntry(String, DbPermissionKey)
+ */
+ public int deleteEntriesForPermission(DbPermissionKey permissionKey)
+ {
+ return deleteEntry(null, permissionKey);
+ }
+
+ public int deleteEntry(String authority, DbPermissionKey permissionKey)
+ {
+ List toDelete = new ArrayList(2);
+ for (DbAccessControlEntry entry : entries)
+ {
+ if (authority != null && !authority.equals(entry.getAuthority().getRecipient()))
+ {
+ // authority is not a match
+ continue;
+ }
+ else if (permissionKey != null && !permissionKey.equals(entry.getPermission().getKey()))
+ {
+ // permission is not a match
+ continue;
+ }
+ toDelete.add(entry);
+ }
+ // delete them
+ for (DbAccessControlEntry entry : toDelete)
+ {
+ // remove from the entry list
+ entry.delete();
+ }
+ // done
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Deleted " + toDelete.size() + " access entries: \n" +
+ " access control list: " + id + "\n" +
+ " authority: " + authority + "\n" +
+ " permission: " + permissionKey);
+ }
+ return toDelete.size();
+ }
+
+ public int deleteEntries()
+ {
+ /*
+ * We don't do the full delete-remove-from-set thing here. Just delete each child entity
+ * and then clear the entry set.
+ */
+
+ Session session = getSession();
+ List toDelete = new ArrayList(entries);
+ // delete each entry
+ for (DbAccessControlEntry entry : toDelete)
+ {
+ session.delete(entry);
+ }
+ // clear the list
+ int count = entries.size();
+ entries.clear();
+ // done
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Deleted " + count + " access entries for access control list " + this.id);
+ }
+ return count;
+ }
+
+ public DbAccessControlEntry getEntry(String authority, DbPermissionKey permissionKey)
+ {
+ for (DbAccessControlEntry entry : entries)
+ {
+ DbAuthority authorityEntity = entry.getAuthority();
+ DbPermission permissionEntity = entry.getPermission();
+ // check for a match
+ if (authorityEntity.getRecipient().equals(authority)
+ && permissionEntity.getKey().equals(permissionKey))
+ {
+ // found it
+ return entry;
+ }
+ }
+ return null;
+ }
+
+ public DbAccessControlEntryImpl newEntry(DbPermission permission, DbAuthority authority, boolean allowed)
+ {
+ DbAccessControlEntryImpl accessControlEntry = new DbAccessControlEntryImpl();
+ // fill
+ accessControlEntry.setAccessControlList(this);
+ accessControlEntry.setPermission(permission);
+ accessControlEntry.setAuthority(authority);
+ accessControlEntry.setAllowed(allowed);
+ // save it
+ getSession().save(accessControlEntry);
+ // maintain inverse set on the acl
+ getEntries().add(accessControlEntry);
+ // done
+ return accessControlEntry;
+ }
+
+ /**
+ * Make a copy of this ACL.
+ * @return The copy.
+ */
+ public DbAccessControlList getCopy()
+ {
+ DbAccessControlList newAcl =
+ new DbAccessControlListImpl();
+ getSession().save(newAcl);
+ for (DbAccessControlEntry entry : entries)
+ {
+ newAcl.newEntry(entry.getPermission(), entry.getAuthority(), entry.isAllowed());
+ }
+ return newAcl;
+ }
+}
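In deleteEntry(authority, permissionKey) above, a null argument acts as a wildcard, which is why deleteEntriesForAuthority and deleteEntriesForPermission can both delegate to the same method. A small standalone sketch of that null-as-wildcard filtering (plain Java, no Alfresco types; the demo data is illustrative):

public class WildcardFilterSketch
{
    // A null criterion matches every candidate, mirroring deleteEntry's filtering.
    static boolean matches(String candidate, String criterion)
    {
        return criterion == null || criterion.equals(candidate);
    }

    public static void main(String[] args)
    {
        String[][] entries = { { "admin", "Read" }, { "guest", "Read" } };

        int matched = 0;
        for (String[] entry : entries)
        {
            // authority criterion is null (match all), permission criterion is "Read"
            if (matches(entry[0], null) && matches(entry[1], "Read"))
            {
                matched++;
            }
        }
        System.out.println(matched + " entries matched");   // 2 entries matched
    }
}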
diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java
index 1419a3c413..2eb1b7d4e7 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java
@@ -1,456 +1,462 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.domain.hibernate;
-
-import java.io.Serializable;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import javax.transaction.UserTransaction;
-
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.domain.ChildAssoc;
-import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.NodeKey;
-import org.alfresco.repo.domain.NodeStatus;
-import org.alfresco.repo.domain.PropertyValue;
-import org.alfresco.repo.domain.Server;
-import org.alfresco.repo.domain.Store;
-import org.alfresco.repo.domain.StoreKey;
-import org.alfresco.repo.domain.Transaction;
-import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
-import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
-import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.namespace.QName;
-import org.alfresco.service.transaction.TransactionService;
-import org.alfresco.util.BaseSpringTest;
-import org.alfresco.util.GUID;
-import org.hibernate.CacheMode;
-import org.hibernate.exception.ConstraintViolationException;
-
-/**
- * Test persistence and retrieval of Hibernate-specific implementations of the
- * {@link org.alfresco.repo.domain.Node} interface
- *
- * @author Derek Hulley
- */
-@SuppressWarnings("unused")
-public class HibernateNodeTest extends BaseSpringTest
-{
- private static final String TEST_NAMESPACE = "http://www.alfresco.org/test/HibernateNodeTest";
- private static int i = 0;
-
- private Store store;
- private Server server;
- private Transaction transaction;
-
- public HibernateNodeTest()
- {
- }
-
- protected void onSetUpInTransaction() throws Exception
- {
- store = new StoreImpl();
- StoreKey storeKey = new StoreKey(StoreRef.PROTOCOL_WORKSPACE,
- "TestWorkspace@" + System.currentTimeMillis() + " - " + System.nanoTime());
- store.setKey(storeKey);
- // persist so that it is present in the hibernate cache
- getSession().save(store);
-
- server = (Server) getSession().get(ServerImpl.class, new Long(1));
- if (server == null)
- {
- server = new ServerImpl();
- server.setIpAddress("" + "i_" + System.currentTimeMillis());
- getSession().save(server);
- }
- transaction = new TransactionImpl();
- transaction.setServer(server);
- transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId());
- getSession().save(transaction);
- }
-
- protected void onTearDownInTransaction()
- {
- // force a flush to ensure that the database updates succeed
- getSession().flush();
- getSession().clear();
- }
-
- public void testSetUp() throws Exception
- {
- assertNotNull("Workspace not initialised", store);
- }
-
- public void testGetStore() throws Exception
- {
- // create a new Node
- Node node = new NodeImpl();
- node.setStore(store);
- node.setUuid(GUID.generate());
- node.setTypeQName(ContentModel.TYPE_CONTAINER);
-
- // now it should work
- Serializable id = getSession().save(node);
-
- // throw the reference away and get the a new one for the id
- node = (Node) getSession().load(NodeImpl.class, id);
- assertNotNull("Node not found", node);
- // check that the store has been loaded
- Store loadedStore = node.getStore();
- assertNotNull("Store not present on node", loadedStore);
- assertEquals("Incorrect store key", store, loadedStore);
- }
-
- public void testNodeStatus()
- {
- NodeKey key = new NodeKey(store.getKey(), "AAA");
- // create the node status
- NodeStatus nodeStatus = new NodeStatusImpl();
- nodeStatus.setKey(key);
- nodeStatus.setTransaction(transaction);
- getSession().save(nodeStatus);
-
- // create a new Node
- Node node = new NodeImpl();
- node.setStore(store);
- node.setUuid(GUID.generate());
- node.setTypeQName(ContentModel.TYPE_CONTAINER);
- Serializable nodeId = getSession().save(node);
-
- // This should all be fine. The node does not HAVE to have a status.
- flushAndClear();
-
- // set the node
- nodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, key);
- nodeStatus.setNode(node);
- flushAndClear();
-
- // is the node retrievable?
- nodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, key);
- node = nodeStatus.getNode();
- assertNotNull("Node was not attached to status", node);
- // change the values
- transaction.setChangeTxnId("txn:456");
- // delete the node
- getSession().delete(node);
-
- try
- {
- flushAndClear();
- fail("Node status may not refer to non-existent node");
- }
- catch(ConstraintViolationException e)
- {
- // expected
- }
- }
-
- /**
- * Check that properties can be persisted and retrieved
- */
- public void testProperties() throws Exception
- {
- // create a new Node
- Node node = new NodeImpl();
- node.setStore(store);
- node.setUuid(GUID.generate());
- node.setTypeQName(ContentModel.TYPE_CONTAINER);
- // give it a property map
- Map propertyMap = new HashMap(5);
- QName propertyQName = QName.createQName("{}A");
- PropertyValue propertyValue = new PropertyValue(DataTypeDefinition.TEXT, "AAA");
- propertyMap.put(propertyQName, propertyValue);
- node.getProperties().putAll(propertyMap);
- // persist it
- Serializable id = getSession().save(node);
-
- // throw the reference away and get the a new one for the id
- node = (Node) getSession().load(NodeImpl.class, id);
- assertNotNull("Node not found", node);
- // extract the Map
- propertyMap = node.getProperties();
- assertNotNull("Map not persisted", propertyMap);
- // ensure that the value is present
- assertNotNull("Property value not present in map", QName.createQName("{}A"));
- }
-
- /**
- * Check that aspect qnames can be added and removed from a node and that they
- * are persisted correctly
- */
- public void testAspects() throws Exception
- {
- // make a real node
- Node node = new NodeImpl();
- node.setStore(store);
- node.setUuid(GUID.generate());
- node.setTypeQName(ContentModel.TYPE_CMOBJECT);
-
- // add some aspects
- QName aspect1 = QName.createQName(TEST_NAMESPACE, "1");
- QName aspect2 = QName.createQName(TEST_NAMESPACE, "2");
- QName aspect3 = QName.createQName(TEST_NAMESPACE, "3");
- QName aspect4 = QName.createQName(TEST_NAMESPACE, "4");
- Set aspects = node.getAspects();
- aspects.add(aspect1);
- aspects.add(aspect2);
- aspects.add(aspect3);
- aspects.add(aspect4);
- assertFalse("Set did not eliminate duplicate aspect qname", aspects.add(aspect4));
-
- // persist
- Serializable id = getSession().save(node);
-
- // flush and clear
- flushAndClear();
-
- // get node and check aspects
- node = (Node) getSession().get(NodeImpl.class, id);
- assertNotNull("Node not persisted", node);
- aspects = node.getAspects();
- assertEquals("Not all aspects persisted", 4, aspects.size());
- }
-
- public void testChildAssoc() throws Exception
- {
- // make a content node
- Node contentNode = new NodeImpl();
- contentNode.setStore(store);
- contentNode.setUuid(GUID.generate());
- contentNode.setTypeQName(ContentModel.TYPE_CONTENT);
- Serializable contentNodeId = getSession().save(contentNode);
-
- // make a container node
- Node containerNode = new NodeImpl();
- containerNode.setStore(store);
- containerNode.setUuid(GUID.generate());
- containerNode.setTypeQName(ContentModel.TYPE_CONTAINER);
- Serializable containerNodeId = getSession().save(containerNode);
- // create an association to the content
- ChildAssoc assoc1 = new ChildAssocImpl();
- assoc1.setIsPrimary(true);
- assoc1.setTypeQName(QName.createQName(null, "type1"));
- assoc1.setQname(QName.createQName(null, "number1"));
- assoc1.setChildNodeName("number1");
- assoc1.setChildNodeNameCrc(1);
- getSession().save(assoc1);
- assoc1.buildAssociation(containerNode, contentNode);
-
- // make another association between the same two parent and child nodes
- ChildAssoc assoc2 = new ChildAssocImpl();
- assoc2.setIsPrimary(true);
- assoc2.setTypeQName(QName.createQName(null, "type2"));
- assoc2.setQname(QName.createQName(null, "number2"));
- assoc2.setChildNodeName("number2");
- assoc2.setChildNodeNameCrc(2);
- getSession().save(assoc2);
- assoc2.buildAssociation(containerNode, contentNode);
-
- assertFalse("Hashcode incorrent", assoc2.hashCode() == 0);
- assertNotSame("Assoc equals failure", assoc1, assoc2);
-
- // reload the container
- containerNode = (Node) getSession().get(NodeImpl.class, containerNodeId);
- assertNotNull("Node not found", containerNode);
-
- // check that we can traverse the association from the child
- Collection parentAssocs = contentNode.getParentAssocs();
- assertEquals("Expected exactly 2 parent assocs", 2, parentAssocs.size());
- parentAssocs = new HashSet(parentAssocs);
- for (ChildAssoc assoc : parentAssocs)
- {
- // maintain inverse assoc sets
- assoc.removeAssociation();
- // remove the assoc
- getSession().delete(assoc);
- }
-
- // check that the child now has zero parents
- parentAssocs = contentNode.getParentAssocs();
- assertEquals("Expected exactly 0 parent assocs", 0, parentAssocs.size());
- }
-
- /**
- * Allows tracing of L2 cache
- */
- public void testCaching() throws Exception
- {
- // make a node
- Node node = new NodeImpl();
- node.setStore(store);
- node.setUuid(GUID.generate());
- node.setTypeQName(ContentModel.TYPE_CONTENT);
- Serializable nodeId = getSession().save(node);
-
- // add some aspects to the node
- Set aspects = node.getAspects();
- aspects.add(ContentModel.ASPECT_AUDITABLE);
-
- // add some properties
- Map properties = node.getProperties();
- properties.put(ContentModel.PROP_NAME, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
-
- // check that the session hands back the same instance
- Node checkNode = (Node) getSession().get(NodeImpl.class, nodeId);
- assertNotNull(checkNode);
- assertTrue("Node retrieved was not same instance", checkNode == node);
-
- Set checkAspects = checkNode.getAspects();
- assertTrue("Aspect set retrieved was not the same instance", checkAspects == aspects);
- assertEquals("Incorrect number of aspects", 1, checkAspects.size());
- QName checkQName = (QName) checkAspects.toArray()[0];
- assertTrue("QName retrieved was not the same instance", checkQName == ContentModel.ASPECT_AUDITABLE);
-
- Map checkProperties = checkNode.getProperties();
- assertTrue("Propery map retrieved was not the same instance", checkProperties == properties);
- assertTrue("Property not found", checkProperties.containsKey(ContentModel.PROP_NAME));
-
- flushAndClear();
- // commit the transaction
- setComplete();
- endTransaction();
-
- TransactionService transactionService = (TransactionService) applicationContext.getBean("transactionComponent");
- UserTransaction txn = transactionService.getUserTransaction();
- try
- {
- txn.begin();
-
- // check that the L2 cache hands back the same instance
- checkNode = (Node) getSession().get(NodeImpl.class, nodeId);
- assertNotNull(checkNode);
- checkAspects = checkNode.getAspects();
-
- txn.commit();
- }
- catch (Throwable e)
- {
- txn.rollback();
- }
- }
-
- /**
- * Create some simple parent-child relationships and flush them. Then read them back in without
- * using the L2 cache.
- */
- public void testQueryJoins() throws Exception
- {
- getSession().setCacheMode(CacheMode.IGNORE);
-
- // make a container node
- Node containerNode = new NodeImpl();
- containerNode.setStore(store);
- containerNode.setUuid(GUID.generate());
- containerNode.setTypeQName(ContentModel.TYPE_CONTAINER);
- containerNode.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- containerNode.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- containerNode.getAspects().add(ContentModel.ASPECT_AUDITABLE);
- Serializable containerNodeId = getSession().save(containerNode);
- NodeKey containerNodeKey = new NodeKey(containerNode.getNodeRef());
- NodeStatus containerNodeStatus = new NodeStatusImpl();
- containerNodeStatus.setKey(containerNodeKey);
- containerNodeStatus.setNode(containerNode);
- containerNodeStatus.setTransaction(transaction);
- getSession().save(containerNodeStatus);
- // make content node 1
- Node contentNode1 = new NodeImpl();
- contentNode1.setStore(store);
- contentNode1.setUuid(GUID.generate());
- contentNode1.setTypeQName(ContentModel.TYPE_CONTENT);
- contentNode1.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- contentNode1.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- contentNode1.getAspects().add(ContentModel.ASPECT_AUDITABLE);
- Serializable contentNode1Id = getSession().save(contentNode1);
- NodeKey contentNodeKey1 = new NodeKey(contentNode1.getNodeRef());
- NodeStatus contentNodeStatus1 = new NodeStatusImpl();
- contentNodeStatus1.setKey(contentNodeKey1);
- contentNodeStatus1.setNode(contentNode1);
- contentNodeStatus1.setTransaction(transaction);
- getSession().save(contentNodeStatus1);
- // make content node 2
- Node contentNode2 = new NodeImpl();
- contentNode2.setStore(store);
- contentNode2.setUuid(GUID.generate());
- contentNode2.setTypeQName(ContentModel.TYPE_CONTENT);
- Serializable contentNode2Id = getSession().save(contentNode2);
- contentNode2.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- contentNode2.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
- contentNode2.getAspects().add(ContentModel.ASPECT_AUDITABLE);
- NodeKey contentNodeKey2 = new NodeKey(contentNode2.getNodeRef());
- NodeStatus contentNodeStatus2 = new NodeStatusImpl();
- contentNodeStatus2.setKey(contentNodeKey2);
- contentNodeStatus2.setNode(contentNode2);
- contentNodeStatus2.setTransaction(transaction);
- getSession().save(contentNodeStatus2);
- // create an association to content 1
- ChildAssoc assoc1 = new ChildAssocImpl();
- assoc1.setIsPrimary(true);
- assoc1.setTypeQName(QName.createQName(null, "type1"));
- assoc1.setQname(QName.createQName(null, "number1"));
- assoc1.setChildNodeName("number1");
- assoc1.setChildNodeNameCrc(1);
- assoc1.buildAssociation(containerNode, contentNode1);
- getSession().save(assoc1);
- // create an association to content 2
- ChildAssoc assoc2 = new ChildAssocImpl();
- assoc2.setIsPrimary(true);
- assoc2.setTypeQName(QName.createQName(null, "type2"));
- assoc2.setQname(QName.createQName(null, "number2"));
- assoc2.setChildNodeName("number2");
- assoc2.setChildNodeNameCrc(2);
- assoc2.buildAssociation(containerNode, contentNode2);
- getSession().save(assoc2);
-
- // make sure that there are no entities cached in either L1 or L2
- getSession().flush();
- getSession().clear();
-
- // now read the structure back in from the container down
- containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
- containerNode = containerNodeStatus.getNode();
-
- // clear out again
- getSession().clear();
-
- // expect that just the specific property gets removed in the delete statement
- getSession().flush();
- getSession().clear();
-
- // Create a second association to content 2
- // create an association to content 2
- containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
- containerNode = containerNodeStatus.getNode();
- contentNodeStatus2 = (NodeStatus) getSession().get(NodeStatusImpl.class, contentNodeKey2);
- contentNode2 = contentNodeStatus2.getNode();
- ChildAssoc assoc3 = new ChildAssocImpl();
- assoc3.setIsPrimary(false);
- assoc3.setTypeQName(QName.createQName(null, "type3"));
- assoc3.setQname(QName.createQName(null, "number3"));
- assoc3.setChildNodeName("number3");
- assoc3.setChildNodeNameCrc(2);
- assoc3.buildAssociation(containerNode, contentNode2); // check whether the children are pulled in for this
- getSession().save(assoc3);
-
- // flush it
- getSession().flush();
- getSession().clear();
- }
-}
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.domain.hibernate;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.transaction.UserTransaction;
+
+import org.alfresco.model.ContentModel;
+import org.alfresco.repo.domain.ChildAssoc;
+import org.alfresco.repo.domain.Node;
+import org.alfresco.repo.domain.NodeKey;
+import org.alfresco.repo.domain.NodeStatus;
+import org.alfresco.repo.domain.PropertyValue;
+import org.alfresco.repo.domain.Server;
+import org.alfresco.repo.domain.Store;
+import org.alfresco.repo.domain.StoreKey;
+import org.alfresco.repo.domain.Transaction;
+import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.namespace.QName;
+import org.alfresco.service.transaction.TransactionService;
+import org.alfresco.util.BaseSpringTest;
+import org.alfresco.util.GUID;
+import org.hibernate.CacheMode;
+import org.hibernate.exception.ConstraintViolationException;
+import org.hibernate.exception.GenericJDBCException;
+
+/**
+ * Test persistence and retrieval of Hibernate-specific implementations of the
+ * {@link org.alfresco.repo.domain.Node} interface
+ *
+ * @author Derek Hulley
+ */
+@SuppressWarnings("unused")
+public class HibernateNodeTest extends BaseSpringTest
+{
+ private static final String TEST_NAMESPACE = "http://www.alfresco.org/test/HibernateNodeTest";
+ private static int i = 0;
+
+ private Store store;
+ private Server server;
+ private Transaction transaction;
+
+ public HibernateNodeTest()
+ {
+ }
+
+ protected void onSetUpInTransaction() throws Exception
+ {
+ store = new StoreImpl();
+ StoreKey storeKey = new StoreKey(StoreRef.PROTOCOL_WORKSPACE,
+ "TestWorkspace@" + System.currentTimeMillis() + " - " + System.nanoTime());
+ store.setKey(storeKey);
+ // persist so that it is present in the hibernate cache
+ getSession().save(store);
+
+ server = (Server) getSession().get(ServerImpl.class, new Long(1));
+ if (server == null)
+ {
+ server = new ServerImpl();
+ server.setIpAddress("" + "i_" + System.currentTimeMillis());
+ getSession().save(server);
+ }
+ transaction = new TransactionImpl();
+ transaction.setServer(server);
+ transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId());
+ getSession().save(transaction);
+ }
+
+ protected void onTearDownInTransaction()
+ {
+ // force a flush to ensure that the database updates succeed
+ getSession().flush();
+ getSession().clear();
+ }
+
+ public void testSetUp() throws Exception
+ {
+ assertNotNull("Workspace not initialised", store);
+ }
+
+ public void testGetStore() throws Exception
+ {
+ // create a new Node
+ Node node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(GUID.generate());
+ node.setTypeQName(ContentModel.TYPE_CONTAINER);
+
+ // now it should work
+ Serializable id = getSession().save(node);
+
+        // throw the reference away and get a new one for the id
+ node = (Node) getSession().load(NodeImpl.class, id);
+ assertNotNull("Node not found", node);
+ // check that the store has been loaded
+ Store loadedStore = node.getStore();
+ assertNotNull("Store not present on node", loadedStore);
+ assertEquals("Incorrect store key", store, loadedStore);
+ }
+
+ public void testNodeStatus()
+ {
+ NodeKey key = new NodeKey(store.getKey(), "AAA");
+ // create the node status
+ NodeStatus nodeStatus = new NodeStatusImpl();
+ nodeStatus.setKey(key);
+ nodeStatus.setTransaction(transaction);
+ getSession().save(nodeStatus);
+
+ // create a new Node
+ Node node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(GUID.generate());
+ node.setTypeQName(ContentModel.TYPE_CONTAINER);
+ Serializable nodeId = getSession().save(node);
+
+ // This should all be fine. The node does not HAVE to have a status.
+ flushAndClear();
+
+ // set the node
+ nodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, key);
+ nodeStatus.setNode(node);
+ flushAndClear();
+
+ // is the node retrievable?
+ nodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, key);
+ node = nodeStatus.getNode();
+ assertNotNull("Node was not attached to status", node);
+ // change the values
+ transaction.setChangeTxnId("txn:456");
+ // delete the node
+ getSession().delete(node);
+
+ try
+ {
+ flushAndClear();
+ fail("Node status may not refer to non-existent node");
+ }
+ catch(ConstraintViolationException e)
+ {
+ // expected
+ }
+ catch(GenericJDBCException e)
+ {
+ // Sybase
+ // expected
+ }
+ }
+
+ /**
+ * Check that properties can be persisted and retrieved
+ */
+ public void testProperties() throws Exception
+ {
+ // create a new Node
+ Node node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(GUID.generate());
+ node.setTypeQName(ContentModel.TYPE_CONTAINER);
+ // give it a property map
+ Map propertyMap = new HashMap(5);
+ QName propertyQName = QName.createQName("{}A");
+ PropertyValue propertyValue = new PropertyValue(DataTypeDefinition.TEXT, "AAA");
+ propertyMap.put(propertyQName, propertyValue);
+ node.getProperties().putAll(propertyMap);
+ // persist it
+ Serializable id = getSession().save(node);
+
+        // throw the reference away and get a new one for the id
+ node = (Node) getSession().load(NodeImpl.class, id);
+ assertNotNull("Node not found", node);
+ // extract the Map
+ propertyMap = node.getProperties();
+ assertNotNull("Map not persisted", propertyMap);
+ // ensure that the value is present
+ assertNotNull("Property value not present in map", QName.createQName("{}A"));
+ }
+
+ /**
+ * Check that aspect qnames can be added and removed from a node and that they
+ * are persisted correctly
+ */
+ public void testAspects() throws Exception
+ {
+ // make a real node
+ Node node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(GUID.generate());
+ node.setTypeQName(ContentModel.TYPE_CMOBJECT);
+
+ // add some aspects
+ QName aspect1 = QName.createQName(TEST_NAMESPACE, "1");
+ QName aspect2 = QName.createQName(TEST_NAMESPACE, "2");
+ QName aspect3 = QName.createQName(TEST_NAMESPACE, "3");
+ QName aspect4 = QName.createQName(TEST_NAMESPACE, "4");
+ Set aspects = node.getAspects();
+ aspects.add(aspect1);
+ aspects.add(aspect2);
+ aspects.add(aspect3);
+ aspects.add(aspect4);
+ assertFalse("Set did not eliminate duplicate aspect qname", aspects.add(aspect4));
+
+ // persist
+ Serializable id = getSession().save(node);
+
+ // flush and clear
+ flushAndClear();
+
+ // get node and check aspects
+ node = (Node) getSession().get(NodeImpl.class, id);
+ assertNotNull("Node not persisted", node);
+ aspects = node.getAspects();
+ assertEquals("Not all aspects persisted", 4, aspects.size());
+ }
+
+ public void testChildAssoc() throws Exception
+ {
+ // make a content node
+ Node contentNode = new NodeImpl();
+ contentNode.setStore(store);
+ contentNode.setUuid(GUID.generate());
+ contentNode.setTypeQName(ContentModel.TYPE_CONTENT);
+ Serializable contentNodeId = getSession().save(contentNode);
+
+ // make a container node
+ Node containerNode = new NodeImpl();
+ containerNode.setStore(store);
+ containerNode.setUuid(GUID.generate());
+ containerNode.setTypeQName(ContentModel.TYPE_CONTAINER);
+ Serializable containerNodeId = getSession().save(containerNode);
+ // create an association to the content
+ ChildAssoc assoc1 = new ChildAssocImpl();
+ assoc1.setIsPrimary(true);
+ assoc1.setTypeQName(QName.createQName(null, "type1"));
+ assoc1.setQname(QName.createQName(null, "number1"));
+ assoc1.setChildNodeName("number1");
+ assoc1.setChildNodeNameCrc(1);
+ assoc1.buildAssociation(containerNode, contentNode);
+ getSession().save(assoc1);
+
+ // make another association between the same two parent and child nodes
+ ChildAssoc assoc2 = new ChildAssocImpl();
+ assoc2.setIsPrimary(true);
+ assoc2.setTypeQName(QName.createQName(null, "type2"));
+ assoc2.setQname(QName.createQName(null, "number2"));
+ assoc2.setChildNodeName("number2");
+ assoc2.setChildNodeNameCrc(2);
+ assoc2.buildAssociation(containerNode, contentNode);
+ getSession().save(assoc2);
+
+ assertFalse("Hashcode incorrent", assoc2.hashCode() == 0);
+ assertNotSame("Assoc equals failure", assoc1, assoc2);
+
+ // reload the container
+ containerNode = (Node) getSession().get(NodeImpl.class, containerNodeId);
+ assertNotNull("Node not found", containerNode);
+
+ // check that we can traverse the association from the child
+ Collection parentAssocs = contentNode.getParentAssocs();
+ assertEquals("Expected exactly 2 parent assocs", 2, parentAssocs.size());
+ parentAssocs = new HashSet(parentAssocs);
+ for (ChildAssoc assoc : parentAssocs)
+ {
+ // maintain inverse assoc sets
+ assoc.removeAssociation();
+ // remove the assoc
+ getSession().delete(assoc);
+ }
+
+ // check that the child now has zero parents
+ parentAssocs = contentNode.getParentAssocs();
+ assertEquals("Expected exactly 0 parent assocs", 0, parentAssocs.size());
+ }
+
+ /**
+ * Allows tracing of L2 cache
+ */
+ public void testCaching() throws Exception
+ {
+ // make a node
+ Node node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(GUID.generate());
+ node.setTypeQName(ContentModel.TYPE_CONTENT);
+ Serializable nodeId = getSession().save(node);
+
+ // add some aspects to the node
+ Set aspects = node.getAspects();
+ aspects.add(ContentModel.ASPECT_AUDITABLE);
+
+ // add some properties
+ Map properties = node.getProperties();
+ properties.put(ContentModel.PROP_NAME, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+
+ // check that the session hands back the same instance
+ Node checkNode = (Node) getSession().get(NodeImpl.class, nodeId);
+ assertNotNull(checkNode);
+ assertTrue("Node retrieved was not same instance", checkNode == node);
+
+ Set checkAspects = checkNode.getAspects();
+ assertTrue("Aspect set retrieved was not the same instance", checkAspects == aspects);
+ assertEquals("Incorrect number of aspects", 1, checkAspects.size());
+ QName checkQName = (QName) checkAspects.toArray()[0];
+ assertTrue("QName retrieved was not the same instance", checkQName == ContentModel.ASPECT_AUDITABLE);
+
+ Map checkProperties = checkNode.getProperties();
+ assertTrue("Propery map retrieved was not the same instance", checkProperties == properties);
+ assertTrue("Property not found", checkProperties.containsKey(ContentModel.PROP_NAME));
+
+ flushAndClear();
+ // commit the transaction
+ setComplete();
+ endTransaction();
+
+ TransactionService transactionService = (TransactionService) applicationContext.getBean("transactionComponent");
+ UserTransaction txn = transactionService.getUserTransaction();
+ try
+ {
+ txn.begin();
+
+ // check that the L2 cache hands back the same instance
+ checkNode = (Node) getSession().get(NodeImpl.class, nodeId);
+ assertNotNull(checkNode);
+ checkAspects = checkNode.getAspects();
+
+ txn.commit();
+ }
+ catch (Throwable e)
+ {
+ txn.rollback();
+ }
+ }
+
+ /**
+ * Create some simple parent-child relationships and flush them. Then read them back in without
+ * using the L2 cache.
+ */
+ public void testQueryJoins() throws Exception
+ {
+ getSession().setCacheMode(CacheMode.IGNORE);
+
+ // make a container node
+ Node containerNode = new NodeImpl();
+ containerNode.setStore(store);
+ containerNode.setUuid(GUID.generate());
+ containerNode.setTypeQName(ContentModel.TYPE_CONTAINER);
+ containerNode.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ containerNode.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ containerNode.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+ Serializable containerNodeId = getSession().save(containerNode);
+ NodeKey containerNodeKey = new NodeKey(containerNode.getNodeRef());
+ NodeStatus containerNodeStatus = new NodeStatusImpl();
+ containerNodeStatus.setKey(containerNodeKey);
+ containerNodeStatus.setNode(containerNode);
+ containerNodeStatus.setTransaction(transaction);
+ getSession().save(containerNodeStatus);
+ // make content node 1
+ Node contentNode1 = new NodeImpl();
+ contentNode1.setStore(store);
+ contentNode1.setUuid(GUID.generate());
+ contentNode1.setTypeQName(ContentModel.TYPE_CONTENT);
+ contentNode1.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ contentNode1.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ contentNode1.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+ Serializable contentNode1Id = getSession().save(contentNode1);
+ NodeKey contentNodeKey1 = new NodeKey(contentNode1.getNodeRef());
+ NodeStatus contentNodeStatus1 = new NodeStatusImpl();
+ contentNodeStatus1.setKey(contentNodeKey1);
+ contentNodeStatus1.setNode(contentNode1);
+ contentNodeStatus1.setTransaction(transaction);
+ getSession().save(contentNodeStatus1);
+ // make content node 2
+ Node contentNode2 = new NodeImpl();
+ contentNode2.setStore(store);
+ contentNode2.setUuid(GUID.generate());
+ contentNode2.setTypeQName(ContentModel.TYPE_CONTENT);
+ Serializable contentNode2Id = getSession().save(contentNode2);
+ contentNode2.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ contentNode2.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+ contentNode2.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+ NodeKey contentNodeKey2 = new NodeKey(contentNode2.getNodeRef());
+ NodeStatus contentNodeStatus2 = new NodeStatusImpl();
+ contentNodeStatus2.setKey(contentNodeKey2);
+ contentNodeStatus2.setNode(contentNode2);
+ contentNodeStatus2.setTransaction(transaction);
+ getSession().save(contentNodeStatus2);
+ // create an association to content 1
+ ChildAssoc assoc1 = new ChildAssocImpl();
+ assoc1.setIsPrimary(true);
+ assoc1.setTypeQName(QName.createQName(null, "type1"));
+ assoc1.setQname(QName.createQName(null, "number1"));
+ assoc1.setChildNodeName("number1");
+ assoc1.setChildNodeNameCrc(1);
+ assoc1.buildAssociation(containerNode, contentNode1);
+ getSession().save(assoc1);
+ // create an association to content 2
+ ChildAssoc assoc2 = new ChildAssocImpl();
+ assoc2.setIsPrimary(true);
+ assoc2.setTypeQName(QName.createQName(null, "type2"));
+ assoc2.setQname(QName.createQName(null, "number2"));
+ assoc2.setChildNodeName("number2");
+ assoc2.setChildNodeNameCrc(2);
+ assoc2.buildAssociation(containerNode, contentNode2);
+ getSession().save(assoc2);
+
+ // make sure that there are no entities cached in either L1 or L2
+ getSession().flush();
+ getSession().clear();
+
+ // now read the structure back in from the container down
+ containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
+ containerNode = containerNodeStatus.getNode();
+
+ // clear out again
+ getSession().clear();
+
+ // expect that just the specific property gets removed in the delete statement
+ getSession().flush();
+ getSession().clear();
+
+ // create a second association to content 2
+ containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
+ containerNode = containerNodeStatus.getNode();
+ contentNodeStatus2 = (NodeStatus) getSession().get(NodeStatusImpl.class, contentNodeKey2);
+ contentNode2 = contentNodeStatus2.getNode();
+ ChildAssoc assoc3 = new ChildAssocImpl();
+ assoc3.setIsPrimary(false);
+ assoc3.setTypeQName(QName.createQName(null, "type3"));
+ assoc3.setQname(QName.createQName(null, "number3"));
+ assoc3.setChildNodeName("number3");
+ assoc3.setChildNodeNameCrc(2);
+ assoc3.buildAssociation(containerNode, contentNode2); // check whether the children are pulled in for this
+ getSession().save(assoc3);
+
+ // flush it
+ getSession().flush();
+ getSession().clear();
+ }
+}
\ No newline at end of file
diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml
index 15fe0ed614..0309f85558 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml
+++ b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml
@@ -314,65 +314,19 @@
assoc.target.id = :targetId
-
- select distinct
- transaction.changeTxnId
- from
- org.alfresco.repo.domain.hibernate.TransactionImpl as transaction
- where
- transaction.changeTxnId > :currentTxnId
- order by
- transaction.changeTxnId
-
-
-
+
select
- count(transaction.changeTxnId)
- from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- join status.transaction as transaction
- where
- status.key.protocol = :storeProtocol and
- status.key.identifier = :storeIdentifier and
- status.node.id is not null and
- transaction.changeTxnId = :changeTxnId
-
-
-
- select
- status
- from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- join status.transaction as transaction
- where
- status.key.protocol = :storeProtocol and
- status.key.identifier = :storeIdentifier and
- status.node.id is not null and
- transaction.changeTxnId = :changeTxnId
-
-
-
- select
- status
- from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- join status.transaction as transaction
- where
- status.key.protocol = :storeProtocol and
- status.key.identifier = :storeIdentifier and
- status.node.id is null and
- transaction.changeTxnId = :changeTxnId
-
-
-
- select distinct
- props.stringValue
+ node
from
org.alfresco.repo.domain.hibernate.NodeImpl as node
join
- node.properties props
+ node.properties prop
where
- props.stringValue like 'contentUrl%'
+ (
+ prop.actualType = :actualTypeString or
+ prop.actualType = 'SERIALIZABLE'
+ ) and
+ prop.persistedType != 'NULL'
diff --git a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
index e770f3f26c..61d3314746 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
+++ b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
@@ -59,4 +59,81 @@
server.ipAddress = :ipAddress
+
+ select
+ max(txn.id)
+ from
+ org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ join status.transaction as txn
+ where
+ status.key.protocol = :protocol and
+ status.key.identifier = :identifier
+
+
+
+ select
+ count(txn.id)
+ from
+ org.alfresco.repo.domain.hibernate.TransactionImpl as txn
+
+
+
+ :lastTxnId
+ order by
+ txn.id
+ ]]>
+
+
+
+ select
+ count(status.key.guid)
+ from
+ org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ join status.transaction as txn
+ where
+ txn.id = :txnId and
+ status.node is not null and
+ status.key.protocol = :protocol and
+ status.key.identifier = :identifier
+
+
+
+ select
+ count(status.key.guid)
+ from
+ org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ join status.transaction as txn
+ where
+ txn.id = :txnId and
+ status.node is null and
+ status.key.protocol = :protocol and
+ status.key.identifier = :identifier
+
+
+
+ select
+ status
+ from
+ org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ where
+ status.transaction.id = :txnId and
+ status.key.protocol = :protocol and
+ status.key.identifier = :identifier
+
+
+
+ select
+ status
+ from
+ org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ where
+ status.transaction.id = :txnId
+
+
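The new Transaction.hbm.xml queries above track change transactions by id rather than by changeTxnId: they locate the last committed transaction for a store, page through later transactions, count the nodes created or deleted by a given transaction, and fetch the node statuses it changed. A minimal sketch of invoking one of them as a named query is shown below; the query name txn.CountTransactionChangedNodes is an assumption, as the <query> wrappers are not visible in this extract.

    import org.hibernate.Query;
    import org.hibernate.Session;

    // Sketch only: counts the nodes still present (not deleted) that were changed by
    // a given transaction in a given store. The query name is assumed.
    public class TransactionChangeCountExample
    {
        public static long countChangedNodes(Session session, long txnId, String protocol, String identifier)
        {
            Query query = session.getNamedQuery("txn.CountTransactionChangedNodes");
            query.setLong("txnId", txnId);
            query.setString("protocol", protocol);
            query.setString("identifier", identifier);
            Number count = (Number) query.uniqueResult();
            return count == null ? 0L : count.longValue();
        }
    }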
diff --git a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
index c3822a51bc..6ab41fa9ff 100644
--- a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
+++ b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
@@ -1,535 +1,539 @@
-/*
- * Copyright (C) 2006 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.domain.schema;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Writer;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.i18n.I18NUtil;
-import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch;
-import org.alfresco.util.TempFileProvider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.hibernate.Session;
-import org.hibernate.SessionFactory;
-import org.hibernate.Transaction;
-import org.hibernate.cfg.Configuration;
-import org.hibernate.dialect.Dialect;
-import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
-import org.hibernate.tool.hbm2ddl.SchemaExport;
-import org.springframework.beans.BeansException;
-import org.springframework.context.ApplicationEvent;
-import org.springframework.context.ApplicationListener;
-import org.springframework.context.event.ContextRefreshedEvent;
-import org.springframework.core.io.Resource;
-import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
-import org.springframework.core.io.support.ResourcePatternResolver;
-import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
-
-/**
- * Bootstraps the schema and schema update. The schema is considered missing if the applied patch table
- * is not present, and the schema is considered empty if the applied patch table is empty.
- *
- * @author Derek Hulley
- */
-public class SchemaBootstrap implements ApplicationListener
-{
- /** The placeholder for the configured Dialect class name: ${db.script.dialect} */
- private static final String PLACEHOLDER_SCRIPT_DIALECT = "\\$\\{db\\.script\\.dialect\\}";
-
- private static final String MSG_EXECUTING_SCRIPT = "schema.update.msg.executing_script";
- private static final String ERR_UPDATE_FAILED = "schema.update.err.update_failed";
- private static final String ERR_VALIDATION_FAILED = "schema.update.err.validation_failed";
- private static final String ERR_SCRIPT_NOT_RUN = "schema.update.err.update_script_not_run";
- private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found";
- private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator";
-
- private static Log logger = LogFactory.getLog(SchemaBootstrap.class);
-
- private LocalSessionFactoryBean localSessionFactory;
- private String schemaOuputFilename;
- private boolean updateSchema;
- private List postCreateScriptUrls;
- private List validateUpdateScriptPatches;
- private List applyUpdateScriptPatches;
-
- public SchemaBootstrap()
- {
- postCreateScriptUrls = new ArrayList(1);
- validateUpdateScriptPatches = new ArrayList(4);
- applyUpdateScriptPatches = new ArrayList(4);
- }
-
- public void setLocalSessionFactory(LocalSessionFactoryBean localSessionFactory) throws BeansException
- {
- this.localSessionFactory = localSessionFactory;
- }
-
- /**
- * Set this to output the full database creation script
- *
- * @param schemaOuputFilename the name of a file to dump the schema to, or null to ignore
- */
- public void setSchemaOuputFilename(String schemaOuputFilename)
- {
- this.schemaOuputFilename = schemaOuputFilename;
- }
-
- /**
- * Set whether to modify the schema or not. Either way, the schema will be validated.
- *
- * @param updateSchema true to update and validate the schema, otherwise false to just
- * validate the schema. Default is true.
- */
- public void setUpdateSchema(boolean updateSchema)
- {
- this.updateSchema = updateSchema;
- }
-
- /**
- * Set the scripts that must be executed after the schema has been created.
- *
- * @param postCreateScriptUrls file URLs
- *
- * @see #PLACEHOLDER_SCRIPT_DIALECT
- */
- public void setPostCreateScriptUrls(List postUpdateScriptUrls)
- {
- this.postCreateScriptUrls = postUpdateScriptUrls;
- }
-
- /**
- * Set the schema script patches that must have been applied. These will not be
- * applied to the database. These can be used where the script cannot be
- * applied automatically or where a particular upgrade path is no longer supported.
- * For example, at version 3.0, the upgrade scripts for version 1.4 may be considered
- * unsupported - this doesn't prevent the manual application of the scripts, though.
- *
- * @param applyUpdateScriptPatches a list of schema patches to check
- */
- public void setValidateUpdateScriptPatches(List scriptPatches)
- {
- this.validateUpdateScriptPatches = scriptPatches;
- }
-
- /**
- * Set the schema script patches that may be executed during an update.
- *
- * @param applyUpdateScriptPatches a list of schema patches to check
- */
- public void setApplyUpdateScriptPatches(List scriptPatches)
- {
- this.applyUpdateScriptPatches = scriptPatches;
- }
-
- public void onApplicationEvent(ApplicationEvent event)
- {
- if (!(event instanceof ContextRefreshedEvent))
- {
- // only work on startup
- return;
- }
-
- // do everything in a transaction
- Session session = getLocalSessionFactory().openSession();
- Transaction transaction = session.beginTransaction();
- try
- {
- // make sure that we don't autocommit
- Connection connection = session.connection();
- connection.setAutoCommit(false);
-
- Configuration cfg = localSessionFactory.getConfiguration();
- // dump the schema, if required
- if (schemaOuputFilename != null)
- {
- File schemaOutputFile = new File(schemaOuputFilename);
- dumpSchemaCreate(cfg, schemaOutputFile);
- }
-
- // update the schema, if required
- if (updateSchema)
- {
- updateSchema(cfg, session, connection);
- }
-
- // verify that all patches have been applied correctly
- checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check scripts
- checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, false); // check scripts
-
- // all done successfully
- transaction.commit();
- }
- catch (Throwable e)
- {
- try { transaction.rollback(); } catch (Throwable ee) {}
- if (updateSchema)
- {
- throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e);
- }
- else
- {
- throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e);
- }
- }
- }
-
- private void dumpSchemaCreate(Configuration cfg, File schemaOutputFile)
- {
- // if the file exists, delete it
- if (schemaOutputFile.exists())
- {
- schemaOutputFile.delete();
- }
- SchemaExport schemaExport = new SchemaExport(cfg)
- .setFormat(true)
- .setHaltOnError(true)
- .setOutputFile(schemaOutputFile.getAbsolutePath())
- .setDelimiter(";");
- schemaExport.execute(false, false, false, true);
- }
-
- private SessionFactory getLocalSessionFactory()
- {
- return (SessionFactory) localSessionFactory.getObject();
- }
-
- /**
- * @return Returns the number of applied patches
- */
- private int countAppliedPatches(Connection connection) throws Exception
- {
- Statement stmt = connection.createStatement();
- try
- {
- ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
- rs.next();
- int count = rs.getInt(1);
- return count;
- }
- catch (Throwable e)
- {
- // we'll try another table name
- }
- finally
- {
- try { stmt.close(); } catch (Throwable e) {}
- }
- // for pre-1.4 databases, the table was named differently
- stmt = connection.createStatement();
- try
- {
- ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
- rs.next();
- int count = rs.getInt(1);
- return count;
- }
- finally
- {
- try { stmt.close(); } catch (Throwable e) {}
- }
- }
-
- /**
- * @return Returns the number of applied patches
- */
- private boolean didPatchSucceed(Connection connection, String patchId) throws Exception
- {
- Statement stmt = connection.createStatement();
- try
- {
- ResultSet rs = stmt.executeQuery("select succeeded from alf_applied_patch where id = '" + patchId + "'");
- if (!rs.next())
- {
- return false;
- }
- boolean succeeded = rs.getBoolean(1);
- return succeeded;
- }
- catch (Throwable e)
- {
- // we'll try another table name
- }
- finally
- {
- try { stmt.close(); } catch (Throwable e) {}
- }
- // for pre-1.4 databases, the table was named differently
- stmt = connection.createStatement();
- try
- {
- ResultSet rs = stmt.executeQuery("select succeeded from applied_patch where id = '" + patchId + "'");
- if (!rs.next())
- {
- return false;
- }
- boolean succeeded = rs.getBoolean(1);
- return succeeded;
- }
- finally
- {
- try { stmt.close(); } catch (Throwable e) {}
- }
- }
-
- /**
- * Builds the schema from scratch or applies the necessary patches to the schema.
- */
- private void updateSchema(Configuration cfg, Session session, Connection connection) throws Exception
- {
- boolean create = false;
- try
- {
- countAppliedPatches(connection);
- }
- catch (Throwable e)
- {
- create = true;
- }
- if (create)
- {
- // the applied patch table is missing - we assume that all other tables are missing
- // perform a full update using Hibernate-generated statements
- File tempFile = TempFileProvider.createTempFile("AlfrescoSchemaCreate", ".sql");
- dumpSchemaCreate(cfg, tempFile);
- FileInputStream tempInputStream = new FileInputStream(tempFile);
- executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
- // execute post-create scripts (not patches)
- for (String scriptUrl : this.postCreateScriptUrls)
- {
- executeScriptUrl(cfg, connection, scriptUrl);
- }
- }
- else
- {
- // we have a database, so just run the update scripts
- checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check for scripts that must have been run
- checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, true); // execute scripts as required
- // let Hibernate do any required updates
- File tempFile = null;
- Writer writer = null;
- try
- {
- final Dialect dialect = Dialect.getDialect(cfg.getProperties());
- DatabaseMetadata metadata = new DatabaseMetadata(connection, dialect);
- String[] sqls = cfg.generateSchemaUpdateScript(dialect, metadata);
- if (sqls.length > 0)
- {
- tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate", ".sql");
- writer = new BufferedWriter(new FileWriter(tempFile));
- for (String sql : sqls)
- {
- writer.append(sql);
- writer.append(";\n");
- }
- }
- }
- finally
- {
- if (writer != null)
- {
- try {writer.close();} catch (Throwable e) {}
- }
- }
- // execute if there were changes raised by Hibernate
- if (tempFile != null)
- {
- InputStream tempInputStream = new FileInputStream(tempFile);
- executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
- }
- }
- }
-
- /**
- * Check that the necessary scripts have been executed against the database
- */
- private void checkSchemaPatchScripts(
- Configuration cfg,
- Session session,
- Connection connection,
- List scriptPatches,
- boolean apply) throws Exception
- {
- // first check if there have been any applied patches
- int appliedPatchCount = countAppliedPatches(connection);
- if (appliedPatchCount == 0)
- {
- // This is a new schema, so upgrade scripts are irrelevant
- // and patches will not have been applied yet
- return;
- }
-
- for (SchemaUpgradeScriptPatch patch : scriptPatches)
- {
- final String patchId = patch.getId();
- final String scriptUrl = patch.getScriptUrl();
-
- // check if the script was successfully executed
- boolean wasSuccessfullyApplied = didPatchSucceed(connection, patchId);
- if (wasSuccessfullyApplied)
- {
- // nothing to do - it has been done before
- continue;
- }
- else if (!apply)
- {
- // the script was not run and may not be run automatically
- throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_RUN, scriptUrl);
- }
- // it wasn't run and it can be run now
- executeScriptUrl(cfg, connection, scriptUrl);
- }
- }
-
- private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception
- {
- Dialect dialect = Dialect.getDialect(cfg.getProperties());
- InputStream scriptInputStream = getScriptInputStream(dialect.getClass(), scriptUrl);
- // check that it exists
- if (scriptInputStream == null)
- {
- throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl);
- }
- // now execute it
- executeScriptFile(cfg, connection, scriptInputStream, scriptUrl);
- }
-
- /**
- * Replaces the dialect placeholder in the script URL and attempts to find a file for
- * it. If not found, the dialect hierarchy will be walked until a compatible script is
- * found. This makes it possible to have scripts that are generic to all dialects.
- *
- * @return Returns an input stream onto the script, otherwise null
- */
- private InputStream getScriptInputStream(Class dialectClazz, String scriptUrl) throws Exception
- {
- // replace the dialect placeholder
- String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER_SCRIPT_DIALECT, dialectClazz.getName());
- // get a handle on the resource
- ResourcePatternResolver rpr = new PathMatchingResourcePatternResolver(this.getClass().getClassLoader());
- Resource resource = rpr.getResource(dialectScriptUrl);
- if (!resource.exists())
- {
- // it wasn't found. Get the superclass of the dialect and try again
- Class superClazz = dialectClazz.getSuperclass();
- if (Dialect.class.isAssignableFrom(superClazz))
- {
- // we still have a Dialect - try again
- return getScriptInputStream(superClazz, scriptUrl);
- }
- else
- {
- // we have exhausted all options
- return null;
- }
- }
- else
- {
- // we have a handle to it
- return resource.getInputStream();
- }
- }
-
- private void executeScriptFile(
- Configuration cfg,
- Connection connection,
- InputStream scriptInputStream,
- String scriptUrl) throws Exception
- {
- logger.info(I18NUtil.getMessage(MSG_EXECUTING_SCRIPT, scriptUrl));
-
- BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF8"));
- try
- {
- int line = 0;
- // loop through all statements
- StringBuilder sb = new StringBuilder(1024);
- while(true)
- {
- String sql = reader.readLine();
- line++;
-
- if (sql == null)
- {
- // nothing left in the file
- break;
- }
-
- // trim it
- sql = sql.trim();
- if (sql.length() == 0 ||
- sql.startsWith( "--" ) ||
- sql.startsWith( "//" ) ||
- sql.startsWith( "/*" ) )
- {
- if (sb.length() > 0)
- {
- // we have an unterminated statement
- throw AlfrescoRuntimeException.create(ERR_STATEMENT_TERMINATOR, (line - 1), scriptUrl);
- }
- // there has not been anything to execute - it's just a comment line
- continue;
- }
- // have we reached the end of a statement?
- boolean execute = false;
- if (sql.endsWith(";"))
- {
- sql = sql.substring(0, sql.length() - 1);
- execute = true;
- }
- // append to the statement being built up
- sb.append(" ").append(sql);
- // execute, if required
- if (execute)
- {
- Statement stmt = connection.createStatement();
- try
- {
- sql = sb.toString();
- if (logger.isDebugEnabled())
- {
- logger.debug("Executing statment: " + sql);
- }
- stmt.execute(sql);
- sb = new StringBuilder(1024);
- }
- finally
- {
- try { stmt.close(); } catch (Throwable e) {}
- }
- }
- }
- }
- finally
- {
- try { reader.close(); } catch (Throwable e) {}
- try { scriptInputStream.close(); } catch (Throwable e) {}
- }
- }
-}
+/*
+ * Copyright (C) 2006 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.domain.schema;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Writer;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.i18n.I18NUtil;
+import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch;
+import org.alfresco.util.TempFileProvider;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
+import org.hibernate.tool.hbm2ddl.SchemaExport;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationEvent;
+import org.springframework.context.ApplicationListener;
+import org.springframework.context.event.ContextRefreshedEvent;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.core.io.support.ResourcePatternResolver;
+import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
+
+/**
+ * Bootstraps the schema and schema update. The schema is considered missing if the applied patch table
+ * is not present, and the schema is considered empty if the applied patch table is empty.
+ *
+ * @author Derek Hulley
+ */
+public class SchemaBootstrap implements ApplicationListener
+{
+ /** The placeholder for the configured Dialect class name: ${db.script.dialect} */
+ private static final String PLACEHOLDER_SCRIPT_DIALECT = "\\$\\{db\\.script\\.dialect\\}";
+
+ private static final String MSG_EXECUTING_SCRIPT = "schema.update.msg.executing_script";
+ private static final String ERR_UPDATE_FAILED = "schema.update.err.update_failed";
+ private static final String ERR_VALIDATION_FAILED = "schema.update.err.validation_failed";
+ private static final String ERR_SCRIPT_NOT_RUN = "schema.update.err.update_script_not_run";
+ private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found";
+ private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator";
+
+ private static Log logger = LogFactory.getLog(SchemaBootstrap.class);
+
+ private LocalSessionFactoryBean localSessionFactory;
+ private String schemaOuputFilename;
+ private boolean updateSchema;
+ private List postCreateScriptUrls;
+ private List validateUpdateScriptPatches;
+ private List applyUpdateScriptPatches;
+
+ public SchemaBootstrap()
+ {
+ postCreateScriptUrls = new ArrayList(1);
+ validateUpdateScriptPatches = new ArrayList(4);
+ applyUpdateScriptPatches = new ArrayList(4);
+ }
+
+ public void setLocalSessionFactory(LocalSessionFactoryBean localSessionFactory) throws BeansException
+ {
+ this.localSessionFactory = localSessionFactory;
+ }
+
+ /**
+ * Set this to output the full database creation script
+ *
+ * @param schemaOuputFilename the name of a file to dump the schema to, or null to ignore
+ */
+ public void setSchemaOuputFilename(String schemaOuputFilename)
+ {
+ this.schemaOuputFilename = schemaOuputFilename;
+ }
+
+ /**
+ * Set whether to modify the schema or not. Either way, the schema will be validated.
+ *
+ * @param updateSchema true to update and validate the schema, otherwise false to just
+ * validate the schema. Default is true.
+ */
+ public void setUpdateSchema(boolean updateSchema)
+ {
+ this.updateSchema = updateSchema;
+ }
+
+ /**
+ * Set the scripts that must be executed after the schema has been created.
+ *
+ * @param postUpdateScriptUrls file URLs
+ *
+ * @see #PLACEHOLDER_SCRIPT_DIALECT
+ */
+ public void setPostCreateScriptUrls(List postUpdateScriptUrls)
+ {
+ this.postCreateScriptUrls = postUpdateScriptUrls;
+ }
+
+ /**
+ * Set the schema script patches that must have been applied. These will not be
+ * applied to the database. These can be used where the script cannot be
+ * applied automatically or where a particular upgrade path is no longer supported.
+ * For example, at version 3.0, the upgrade scripts for version 1.4 may be considered
+ * unsupported - this doesn't prevent the manual application of the scripts, though.
+ *
+ * @param scriptPatches a list of schema patches to check
+ */
+ public void setValidateUpdateScriptPatches(List scriptPatches)
+ {
+ this.validateUpdateScriptPatches = scriptPatches;
+ }
+
+ /**
+ * Set the schema script patches that may be executed during an update.
+ *
+ * @param scriptPatches a list of schema patches to check
+ */
+ public void setApplyUpdateScriptPatches(List scriptPatches)
+ {
+ this.applyUpdateScriptPatches = scriptPatches;
+ }
+
+ public void onApplicationEvent(ApplicationEvent event)
+ {
+ if (!(event instanceof ContextRefreshedEvent))
+ {
+ // only work on startup
+ return;
+ }
+
+ // do everything in a transaction
+ Session session = getLocalSessionFactory().openSession();
+ Transaction transaction = session.beginTransaction();
+ try
+ {
+ // make sure that we don't autocommit
+ Connection connection = session.connection();
+ connection.setAutoCommit(false);
+
+ Configuration cfg = localSessionFactory.getConfiguration();
+ // dump the schema, if required
+ if (schemaOuputFilename != null)
+ {
+ File schemaOutputFile = new File(schemaOuputFilename);
+ dumpSchemaCreate(cfg, schemaOutputFile);
+ }
+
+ // update the schema, if required
+ if (updateSchema)
+ {
+ updateSchema(cfg, session, connection);
+ }
+
+ // verify that all patches have been applied correctly
+ checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check scripts
+ checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, false); // check scripts
+
+ // all done successfully
+ transaction.commit();
+ }
+ catch (Throwable e)
+ {
+ try { transaction.rollback(); } catch (Throwable ee) {}
+ if (updateSchema)
+ {
+ throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e);
+ }
+ else
+ {
+ throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e);
+ }
+ }
+ }
+
+ private void dumpSchemaCreate(Configuration cfg, File schemaOutputFile)
+ {
+ // if the file exists, delete it
+ if (schemaOutputFile.exists())
+ {
+ schemaOutputFile.delete();
+ }
+ SchemaExport schemaExport = new SchemaExport(cfg)
+ .setFormat(true)
+ .setHaltOnError(true)
+ .setOutputFile(schemaOutputFile.getAbsolutePath())
+ .setDelimiter(";");
+ schemaExport.execute(false, false, false, true);
+ }
+
+ private SessionFactory getLocalSessionFactory()
+ {
+ return (SessionFactory) localSessionFactory.getObject();
+ }
+
+ /**
+ * @return Returns the number of applied patches
+ */
+ private int countAppliedPatches(Connection connection) throws Exception
+ {
+ Statement stmt = connection.createStatement();
+ try
+ {
+ ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
+ rs.next();
+ int count = rs.getInt(1);
+ return count;
+ }
+ catch (Throwable e)
+ {
+ // we'll try another table name
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ // for pre-1.4 databases, the table was named differently
+ stmt = connection.createStatement();
+ try
+ {
+ ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
+ rs.next();
+ int count = rs.getInt(1);
+ return count;
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ }
+
+ /**
+ * @return Returns true if the patch was successfully applied, otherwise false
+ */
+ private boolean didPatchSucceed(Connection connection, String patchId) throws Exception
+ {
+ Statement stmt = connection.createStatement();
+ try
+ {
+ ResultSet rs = stmt.executeQuery("select succeeded from alf_applied_patch where id = '" + patchId + "'");
+ if (!rs.next())
+ {
+ return false;
+ }
+ boolean succeeded = rs.getBoolean(1);
+ return succeeded;
+ }
+ catch (Throwable e)
+ {
+ // we'll try another table name
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ // for pre-1.4 databases, the table was named differently
+ stmt = connection.createStatement();
+ try
+ {
+ ResultSet rs = stmt.executeQuery("select succeeded from applied_patch where id = '" + patchId + "'");
+ if (!rs.next())
+ {
+ return false;
+ }
+ boolean succeeded = rs.getBoolean(1);
+ return succeeded;
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ }
+
+ /**
+ * Builds the schema from scratch or applies the necessary patches to the schema.
+ */
+ private void updateSchema(Configuration cfg, Session session, Connection connection) throws Exception
+ {
+ boolean create = false;
+ try
+ {
+ countAppliedPatches(connection);
+ }
+ catch (Throwable e)
+ {
+ create = true;
+ }
+ if (create)
+ {
+ // Get the dialect
+ final Dialect dialect = Dialect.getDialect(cfg.getProperties());
+ String dialectStr = dialect.getClass().getName();
+
+ // the applied patch table is missing - we assume that all other tables are missing
+ // perform a full update using Hibernate-generated statements
+ File tempFile = TempFileProvider.createTempFile("AlfrescoSchemaCreate-" + dialectStr + "-", ".sql");
+ dumpSchemaCreate(cfg, tempFile);
+ FileInputStream tempInputStream = new FileInputStream(tempFile);
+ executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
+ // execute post-create scripts (not patches)
+ for (String scriptUrl : this.postCreateScriptUrls)
+ {
+ executeScriptUrl(cfg, connection, scriptUrl);
+ }
+ }
+ else
+ {
+ // we have a database, so just run the update scripts
+ checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check for scripts that must have been run
+ checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, true); // execute scripts as required
+ // let Hibernate do any required updates
+ File tempFile = null;
+ Writer writer = null;
+ try
+ {
+ final Dialect dialect = Dialect.getDialect(cfg.getProperties());
+ DatabaseMetadata metadata = new DatabaseMetadata(connection, dialect);
+ String[] sqls = cfg.generateSchemaUpdateScript(dialect, metadata);
+ if (sqls.length > 0)
+ {
+ tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate", ".sql");
+ writer = new BufferedWriter(new FileWriter(tempFile));
+ for (String sql : sqls)
+ {
+ writer.append(sql);
+ writer.append(";\n");
+ }
+ }
+ }
+ finally
+ {
+ if (writer != null)
+ {
+ try {writer.close();} catch (Throwable e) {}
+ }
+ }
+ // execute if there were changes raised by Hibernate
+ if (tempFile != null)
+ {
+ InputStream tempInputStream = new FileInputStream(tempFile);
+ executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
+ }
+ }
+ }
+
+ /**
+ * Check that the necessary scripts have been executed against the database
+ */
+ private void checkSchemaPatchScripts(
+ Configuration cfg,
+ Session session,
+ Connection connection,
+ List scriptPatches,
+ boolean apply) throws Exception
+ {
+ // first check if there have been any applied patches
+ int appliedPatchCount = countAppliedPatches(connection);
+ if (appliedPatchCount == 0)
+ {
+ // This is a new schema, so upgrade scripts are irrelevant
+ // and patches will not have been applied yet
+ return;
+ }
+
+ for (SchemaUpgradeScriptPatch patch : scriptPatches)
+ {
+ final String patchId = patch.getId();
+ final String scriptUrl = patch.getScriptUrl();
+
+ // check if the script was successfully executed
+ boolean wasSuccessfullyApplied = didPatchSucceed(connection, patchId);
+ if (wasSuccessfullyApplied)
+ {
+ // nothing to do - it has been done before
+ continue;
+ }
+ else if (!apply)
+ {
+ // the script was not run and may not be run automatically
+ throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_RUN, scriptUrl);
+ }
+ // it wasn't run and it can be run now
+ executeScriptUrl(cfg, connection, scriptUrl);
+ }
+ }
+
+ private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception
+ {
+ Dialect dialect = Dialect.getDialect(cfg.getProperties());
+ InputStream scriptInputStream = getScriptInputStream(dialect.getClass(), scriptUrl);
+ // check that it exists
+ if (scriptInputStream == null)
+ {
+ throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl);
+ }
+ // now execute it
+ executeScriptFile(cfg, connection, scriptInputStream, scriptUrl);
+ }
+
+ /**
+ * Replaces the dialect placeholder in the script URL and attempts to find a file for
+ * it. If not found, the dialect hierarchy will be walked until a compatible script is
+ * found. This makes it possible to have scripts that are generic to all dialects.
+ *
+ * @return Returns an input stream onto the script, otherwise null
+ */
+ private InputStream getScriptInputStream(Class dialectClazz, String scriptUrl) throws Exception
+ {
+ // replace the dialect placeholder
+ String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER_SCRIPT_DIALECT, dialectClazz.getName());
+ // get a handle on the resource
+ ResourcePatternResolver rpr = new PathMatchingResourcePatternResolver(this.getClass().getClassLoader());
+ Resource resource = rpr.getResource(dialectScriptUrl);
+ if (!resource.exists())
+ {
+ // it wasn't found. Get the superclass of the dialect and try again
+ Class superClazz = dialectClazz.getSuperclass();
+ if (Dialect.class.isAssignableFrom(superClazz))
+ {
+ // we still have a Dialect - try again
+ return getScriptInputStream(superClazz, scriptUrl);
+ }
+ else
+ {
+ // we have exhausted all options
+ return null;
+ }
+ }
+ else
+ {
+ // we have a handle to it
+ return resource.getInputStream();
+ }
+ }
+
+ private void executeScriptFile(
+ Configuration cfg,
+ Connection connection,
+ InputStream scriptInputStream,
+ String scriptUrl) throws Exception
+ {
+ logger.info(I18NUtil.getMessage(MSG_EXECUTING_SCRIPT, scriptUrl));
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF8"));
+ try
+ {
+ int line = 0;
+ // loop through all statements
+ StringBuilder sb = new StringBuilder(1024);
+ while(true)
+ {
+ String sql = reader.readLine();
+ line++;
+
+ if (sql == null)
+ {
+ // nothing left in the file
+ break;
+ }
+
+ // trim it
+ sql = sql.trim();
+ if (sql.length() == 0 ||
+ sql.startsWith( "--" ) ||
+ sql.startsWith( "//" ) ||
+ sql.startsWith( "/*" ) )
+ {
+ if (sb.length() > 0)
+ {
+ // we have an unterminated statement
+ throw AlfrescoRuntimeException.create(ERR_STATEMENT_TERMINATOR, (line - 1), scriptUrl);
+ }
+ // there has not been anything to execute - it's just a comment line
+ continue;
+ }
+ // have we reached the end of a statement?
+ boolean execute = false;
+ if (sql.endsWith(";"))
+ {
+ sql = sql.substring(0, sql.length() - 1);
+ execute = true;
+ }
+ // append to the statement being built up
+ sb.append(" ").append(sql);
+ // execute, if required
+ if (execute)
+ {
+ Statement stmt = connection.createStatement();
+ try
+ {
+ sql = sb.toString();
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Executing statment: " + sql);
+ }
+ stmt.execute(sql);
+ sb = new StringBuilder(1024);
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ }
+ }
+ }
+ finally
+ {
+ try { reader.close(); } catch (Throwable e) {}
+ try { scriptInputStream.close(); } catch (Throwable e) {}
+ }
+ }
+}
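Functionally, the rewrite above only changes updateSchema: the Hibernate-generated creation script is now dumped to a temporary file whose name embeds the configured Dialect class, which makes it easier to tell which dialect produced a given AlfrescoSchemaCreate dump. For reference, here is a standalone sketch of the dialect-placeholder resolution performed by getScriptInputStream; the dialect class and script URL in main are purely illustrative.

    import org.hibernate.dialect.Dialect;
    import org.hibernate.dialect.MySQLInnoDBDialect;
    import org.springframework.core.io.Resource;
    import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

    // Sketch only: resolves a dialect-specific script URL, walking up the Dialect
    // class hierarchy until a matching resource is found, mirroring getScriptInputStream.
    public class DialectScriptResolverExample
    {
        private static final String PLACEHOLDER = "\\$\\{db\\.script\\.dialect\\}";

        public static Resource resolve(Class dialectClazz, String scriptUrl)
        {
            PathMatchingResourcePatternResolver rpr = new PathMatchingResourcePatternResolver();
            String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER, dialectClazz.getName());
            Resource resource = rpr.getResource(dialectScriptUrl);
            if (resource.exists())
            {
                return resource;
            }
            Class superClazz = dialectClazz.getSuperclass();
            // fall back to the script for the more generic dialect, if there is one
            return Dialect.class.isAssignableFrom(superClazz) ? resolve(superClazz, scriptUrl) : null;
        }

        public static void main(String[] args)
        {
            // the script URL is hypothetical, e.g. classpath:scripts/${db.script.dialect}/post-create.sql
            Resource script = resolve(MySQLInnoDBDialect.class, args[0]);
            System.out.println(script == null ? "no script found" : "resolved: " + script.getDescription());
        }
    }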
diff --git a/source/java/org/alfresco/repo/jscript/Actions.java b/source/java/org/alfresco/repo/jscript/Actions.java
index 818f52d130..b371b1b5aa 100644
--- a/source/java/org/alfresco/repo/jscript/Actions.java
+++ b/source/java/org/alfresco/repo/jscript/Actions.java
@@ -202,7 +202,7 @@ public final class Actions implements Scopeable
@SuppressWarnings("synthetic-access")
public void execute(Node node)
{
- if (this.parameters.isModified())
+ if (this.parameters != null && this.parameters.isModified())
{
Map actionParams = action.getParameterValues();
actionParams.clear();
diff --git a/source/java/org/alfresco/repo/jscript/Node.java b/source/java/org/alfresco/repo/jscript/Node.java
index 25226c1629..daeb82ed3f 100644
--- a/source/java/org/alfresco/repo/jscript/Node.java
+++ b/source/java/org/alfresco/repo/jscript/Node.java
@@ -79,6 +79,11 @@ import org.springframework.util.StringUtils;
*/
public class Node implements Serializable, Scopeable
{
+ /**
+ * Comment for serialVersionUID
+ */
+ private static final long serialVersionUID = -3378946227712939600L;
+
private static Log logger = LogFactory.getLog(Node.class);
private final static String NAMESPACE_BEGIN = "" + QName.NAMESPACE_BEGIN;
@@ -885,6 +890,43 @@ public class Node implements Serializable, Scopeable
this.services.getPermissionService().deletePermission(this.nodeRef, authority, permission);
}
+ // -------------
+ // Ownership API
+
+ /**
+ * Set the owner of the node
+ */
+ public void setOwner(String userId)
+ {
+ this.services.getOwnableService().setOwner(this.nodeRef, userId);
+ }
+
+ /**
+ * Take ownership of the node.
+ */
+ public void takeOwnership()
+ {
+ this.services.getOwnableService().takeOwnership(this.nodeRef);
+ }
+
+ /**
+ * Get the owner of the node.
+ * @return the owner of the node
+ */
+ public String getOwner()
+ {
+ return this.services.getOwnableService().getOwner(this.nodeRef);
+ }
+
+ /**
+ * Make owner available as a property.
+ *
+ * @return the owner of the node
+ */
+ public String jsGet_owner()
+ {
+ return getOwner();
+ }
// ------------------------------------------------------------------------------
// Create and Modify API
@@ -1645,8 +1687,11 @@ public class Node implements Serializable, Scopeable
{
if (this.nodeService.exists(nodeRef))
{
+ // TODO: DC: Allow debug output of property values - for now it's disabled as this could potentially
+ // follow a large network of nodes. Unfortunately, JBPM issues unprotected debug statements
+ // where node.toString is used - will request this is fixed in next release of JBPM.
return "Node Type: " + getType() +
- "\nNode Properties: " + this.getProperties().toString() +
+ "\nNode Properties: " + this.getProperties().size() +
"\nNode Aspects: " + this.getAspects().toString();
}
else
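The new ownership methods above are thin wrappers around the OwnableService, with jsGet_owner exposing the owner as a read-only script property. The equivalent calls made directly against the service layer look roughly like this; the helper class and method are illustrative only.

    import org.alfresco.service.ServiceRegistry;
    import org.alfresco.service.cmr.repository.NodeRef;
    import org.alfresco.service.cmr.security.OwnableService;

    // Sketch only: the same ownership operations invoked directly against the
    // OwnableService, which is what the scriptable Node methods delegate to.
    public class OwnershipExample
    {
        public static void transferOwnership(ServiceRegistry services, NodeRef nodeRef, String userId)
        {
            OwnableService ownableService = services.getOwnableService();
            String previousOwner = ownableService.getOwner(nodeRef);   // may be null if no explicit owner
            ownableService.setOwner(nodeRef, userId);                  // hand the node to the given user
            System.out.println("Owner changed from " + previousOwner + " to " + ownableService.getOwner(nodeRef));
        }
    }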
diff --git a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
index 4fe89d582b..6d5eff338d 100644
--- a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
+++ b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
@@ -73,7 +73,11 @@ public class FileFolderServiceImpl implements FileFolderService
/** Shallow search for all files and folders */
private static final String LUCENE_QUERY_SHALLOW_ALL =
"+PARENT:\"${cm:parent}\"" +
- "-TYPE:\"" + ContentModel.TYPE_SYSTEM_FOLDER + "\" ";
+ "-TYPE:\"" + ContentModel.TYPE_SYSTEM_FOLDER + "\" " +
+ "+(" +
+ "TYPE:\"" + ContentModel.TYPE_CONTENT + "\" " +
+ "TYPE:\"" + ContentModel.TYPE_FOLDER + "\" " +
+ ")";
/** Shallow search for all files and folders */
private static final String LUCENE_QUERY_SHALLOW_FOLDERS =
diff --git a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
index 279cdb5922..c06659c665 100644
--- a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
+++ b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
@@ -28,6 +28,7 @@ import junit.framework.TestCase;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
+import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.model.FileExistsException;
@@ -91,6 +92,9 @@ public class FileFolderServiceImplTest extends TestCase
txn = transactionService.getUserTransaction();
txn.begin();
+ // downgrade integrity
+ IntegrityChecker.setWarnInTransaction();
+
// authenticate
authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName());
diff --git a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
index f6de57d293..6dd08e7ca8 100644
--- a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
+++ b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
@@ -35,6 +35,7 @@ import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.domain.hibernate.ChildAssocImpl;
import org.alfresco.repo.domain.hibernate.NodeImpl;
import org.alfresco.repo.node.db.NodeDaoService;
+import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
@@ -161,6 +162,9 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
rootNodeRef = nodeService.getRootNode(storeRef);
+
+ // downgrade integrity checks
+ IntegrityChecker.setWarnInTransaction();
}
@Override
diff --git a/source/java/org/alfresco/repo/node/PerformanceNodeServiceTest.java b/source/java/org/alfresco/repo/node/PerformanceNodeServiceTest.java
index 8935297d7c..51810f9c1f 100644
--- a/source/java/org/alfresco/repo/node/PerformanceNodeServiceTest.java
+++ b/source/java/org/alfresco/repo/node/PerformanceNodeServiceTest.java
@@ -27,6 +27,7 @@ import org.alfresco.model.ContentModel;
import org.alfresco.repo.dictionary.DictionaryComponent;
import org.alfresco.repo.dictionary.DictionaryDAO;
import org.alfresco.repo.dictionary.M2Model;
+import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
@@ -160,6 +161,7 @@ public class PerformanceNodeServiceTest extends TestCase
{
public Object doWork()
{
+ IntegrityChecker.setWarnInTransaction();
buildNodeChildren(rootNodeRef, 1, testDepth, testChildCount);
return null;
}
diff --git a/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java b/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java
index d689dd9912..1ac6c4a274 100644
--- a/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java
+++ b/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java
@@ -29,6 +29,7 @@ import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.node.StoreArchiveMap;
import org.alfresco.repo.node.archive.RestoreNodeReport.RestoreStatus;
+import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.service.ServiceRegistry;
@@ -114,6 +115,9 @@ public class ArchiveAndRestoreTest extends TestCase
txn = transactionService.getUserTransaction();
txn.begin();
+ // downgrade integrity checks
+ IntegrityChecker.setWarnInTransaction();
+
try
{
authenticationComponent.setSystemUserAsCurrentUser();
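ArchiveAndRestoreTest, FileFolderServiceImplTest, BaseNodeServiceTest and PerformanceNodeServiceTest all adopt the same pattern here: once the test transaction has started, integrity violations are downgraded to warnings for that transaction so that partially-built fixtures do not fail the integrity checks. A minimal sketch of that setUp fragment is shown below; the class and method names are illustrative only.

    import javax.transaction.UserTransaction;

    import org.alfresco.repo.node.integrity.IntegrityChecker;
    import org.alfresco.service.transaction.TransactionService;

    // Sketch only: start a user transaction, then downgrade integrity violations to
    // warnings for that transaction before building test data.
    public class IntegrityWarnSetUpExample
    {
        private UserTransaction txn;

        public void startTestTransaction(TransactionService transactionService) throws Exception
        {
            txn = transactionService.getUserTransaction();
            txn.begin();
            // applies to the current transaction only
            IntegrityChecker.setWarnInTransaction();
        }
    }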
diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
index 0726a6a0dc..b92cc1f17e 100644
--- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
+++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
@@ -1,1823 +1,1820 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.node.db;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Stack;
-
-import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.domain.ChildAssoc;
-import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.NodeAssoc;
-import org.alfresco.repo.domain.NodeStatus;
-import org.alfresco.repo.domain.PropertyValue;
-import org.alfresco.repo.domain.Store;
-import org.alfresco.repo.node.AbstractNodeServiceImpl;
-import org.alfresco.repo.node.StoreArchiveMap;
-import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
-import org.alfresco.service.cmr.dictionary.AspectDefinition;
-import org.alfresco.service.cmr.dictionary.AssociationDefinition;
-import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition;
-import org.alfresco.service.cmr.dictionary.ClassDefinition;
-import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
-import org.alfresco.service.cmr.dictionary.DictionaryService;
-import org.alfresco.service.cmr.dictionary.InvalidAspectException;
-import org.alfresco.service.cmr.dictionary.InvalidTypeException;
-import org.alfresco.service.cmr.dictionary.PropertyDefinition;
-import org.alfresco.service.cmr.dictionary.TypeDefinition;
-import org.alfresco.service.cmr.repository.AssociationExistsException;
-import org.alfresco.service.cmr.repository.AssociationRef;
-import org.alfresco.service.cmr.repository.ChildAssociationRef;
-import org.alfresco.service.cmr.repository.CyclicChildRelationshipException;
-import org.alfresco.service.cmr.repository.InvalidChildAssociationRefException;
-import org.alfresco.service.cmr.repository.InvalidNodeRefException;
-import org.alfresco.service.cmr.repository.InvalidStoreRefException;
-import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
-import org.alfresco.service.cmr.repository.Path;
-import org.alfresco.service.cmr.repository.StoreExistsException;
-import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.cmr.repository.NodeRef.Status;
-import org.alfresco.service.namespace.NamespaceService;
-import org.alfresco.service.namespace.QName;
-import org.alfresco.service.namespace.QNamePattern;
-import org.alfresco.util.ParameterCheck;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.springframework.dao.DataIntegrityViolationException;
-import org.springframework.util.Assert;
-
-/**
- * Node service using database persistence layer to fulfill functionality
- *
- * @author Derek Hulley
- */
-public class DbNodeServiceImpl extends AbstractNodeServiceImpl
-{
- private static Log logger = LogFactory.getLog(DbNodeServiceImpl.class);
- private static Log loggerPaths = LogFactory.getLog(DbNodeServiceImpl.class.getName() + ".paths");
-
- private NodeDaoService nodeDaoService;
- private StoreArchiveMap storeArchiveMap;
- private NodeService avmNodeService;
-
- public DbNodeServiceImpl()
- {
- storeArchiveMap = new StoreArchiveMap(); // in case it is not set
- }
-
- public void setNodeDaoService(NodeDaoService nodeDaoService)
- {
- this.nodeDaoService = nodeDaoService;
- }
-
- public void setStoreArchiveMap(StoreArchiveMap storeArchiveMap)
- {
- this.storeArchiveMap = storeArchiveMap;
- }
-
- public void setAvmNodeService(NodeService avmNodeService)
- {
- this.avmNodeService = avmNodeService;
- }
-
- /**
- * Performs a null-safe get of the node
- *
- * @param nodeRef the node to retrieve
- * @return Returns the node entity (never null)
- * @throws InvalidNodeRefException if the referenced node could not be found
- */
- private Node getNodeNotNull(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node unchecked = nodeDaoService.getNode(nodeRef);
- if (unchecked == null)
- {
- throw new InvalidNodeRefException("Node does not exist: " + nodeRef, nodeRef);
- }
- return unchecked;
- }
-
- public boolean exists(StoreRef storeRef)
- {
- Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
- boolean exists = (store != null);
- // done
- return exists;
- }
-
- public boolean exists(NodeRef nodeRef)
- {
- Node node = nodeDaoService.getNode(nodeRef);
- boolean exists = (node != null);
- // done
- return exists;
- }
-
- public Status getNodeStatus(NodeRef nodeRef)
- {
- NodeStatus nodeStatus = nodeDaoService.getNodeStatus(nodeRef, false);
- if (nodeStatus == null) // node never existed
- {
- return null;
- }
- else
- {
- return new NodeRef.Status(
- nodeStatus.getTransaction().getChangeTxnId(),
- nodeStatus.isDeleted());
- }
- }
-
- /**
- * @see NodeDaoService#getStores()
- */
- public List getStores()
- {
- List stores = nodeDaoService.getStores();
- List storeRefs = new ArrayList(stores.size());
- for (Store store : stores)
- {
- storeRefs.add(store.getStoreRef());
- }
- // Now get the AVMStores.
- List avmStores = avmNodeService.getStores();
- storeRefs.addAll(avmStores);
- // Return them all.
- return storeRefs;
- }
-
- /**
- * Defers to the typed service
- * @see StoreDaoService#createWorkspace(String)
- */
- public StoreRef createStore(String protocol, String identifier)
- {
- StoreRef storeRef = new StoreRef(protocol, identifier);
- // check that the store does not already exist
- Store store = nodeDaoService.getStore(protocol, identifier);
- if (store != null)
- {
- throw new StoreExistsException("Unable to create a store that already exists: " + storeRef, storeRef);
- }
-
- // invoke policies
- invokeBeforeCreateStore(ContentModel.TYPE_STOREROOT, storeRef);
-
- // create a new one
- store = nodeDaoService.createStore(protocol, identifier);
- // get the root node
- Node rootNode = store.getRootNode();
- // assign the root aspect - this is expected of all roots, even store roots
- addAspect(rootNode.getNodeRef(),
- ContentModel.ASPECT_ROOT,
- Collections.emptyMap());
-
- // invoke policies
- invokeOnCreateStore(rootNode.getNodeRef());
-
- // done
- if (!store.getStoreRef().equals(storeRef))
- {
- throw new RuntimeException("Incorrect store reference");
- }
- return storeRef;
- }
-
- public NodeRef getRootNode(StoreRef storeRef) throws InvalidStoreRefException
- {
- Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
- if (store == null)
- {
- throw new InvalidStoreRefException("Store does not exist", storeRef);
- }
- // get the root
- Node node = store.getRootNode();
- if (node == null)
- {
- throw new InvalidStoreRefException("Store does not have a root node", storeRef);
- }
- NodeRef nodeRef = node.getNodeRef();
- // done
- return nodeRef;
- }
-
- /**
- * @see #createNode(NodeRef, QName, QName, QName, Map)
- */
- public ChildAssociationRef createNode(
- NodeRef parentRef,
- QName assocTypeQName,
- QName assocQName,
- QName nodeTypeQName)
- {
- return this.createNode(parentRef, assocTypeQName, assocQName, nodeTypeQName, null);
- }
-
- /**
- * @see org.alfresco.service.cmr.repository.NodeService#createNode(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, java.util.Map)
- */
- public ChildAssociationRef createNode(
- NodeRef parentRef,
- QName assocTypeQName,
- QName assocQName,
- QName nodeTypeQName,
- Map properties)
- {
- Assert.notNull(parentRef);
- Assert.notNull(assocTypeQName);
- Assert.notNull(assocQName);
-
- // null property map is allowed
- if (properties == null)
- {
- properties = new HashMap();
- }
- else
- {
- // Copy the incomming property map since we may need to modify it later
- properties = new HashMap(properties);
- }
-
- // Invoke policy behaviour
- invokeBeforeUpdateNode(parentRef);
- invokeBeforeCreateNode(parentRef, assocTypeQName, assocQName, nodeTypeQName);
-
- // get the store that the parent belongs to
- StoreRef storeRef = parentRef.getStoreRef();
- Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
- if (store == null)
- {
- throw new RuntimeException("No store found for parent node: " + parentRef);
- }
-
- // check the node type
- TypeDefinition nodeTypeDef = dictionaryService.getType(nodeTypeQName);
- if (nodeTypeDef == null)
- {
- throw new InvalidTypeException(nodeTypeQName);
- }
-
- // get/generate an ID for the node
- String newId = generateGuid(properties);
-
- // create the node instance
- Node childNode = nodeDaoService.newNode(store, newId, nodeTypeQName);
-
- // get the parent node
- Node parentNode = getNodeNotNull(parentRef);
-
- // Set the default property values
- addDefaultPropertyValues(nodeTypeDef, properties);
-
- // Add the default aspects to the node
- addDefaultAspects(nodeTypeDef, childNode, properties);
-
- // set the properties - it is a new node so only set properties if there are any
- Map propertiesBefore = getPropertiesImpl(childNode);
- Map propertiesAfter = null;
- if (properties.size() > 0)
- {
- propertiesAfter = setPropertiesImpl(childNode, properties);
- }
-
- // create the association
- ChildAssoc childAssoc = nodeDaoService.newChildAssoc(
- parentNode,
- childNode,
- true,
- assocTypeQName,
- assocQName);
- setChildUniqueName(childNode); // ensure uniqueness
- ChildAssociationRef childAssocRef = childAssoc.getChildAssocRef();
-
- // Invoke policy behaviour
- invokeOnCreateNode(childAssocRef);
- invokeOnUpdateNode(parentRef);
- if (propertiesAfter != null)
- {
- invokeOnUpdateProperties(childAssocRef.getChildRef(), propertiesBefore, propertiesAfter);
- }
-
- // done
- return childAssocRef;
- }
-
- /**
- * Add the default aspects to a given node
- *
- * @param nodeTypeDef
- */
- private void addDefaultAspects(ClassDefinition classDefinition, Node node, Map properties)
- {
- NodeRef nodeRef = node.getNodeRef();
-
- // get the mandatory aspects for the node type
- List defaultAspectDefs = classDefinition.getDefaultAspects();
-
- // add all the aspects to the node
- Set nodeAspects = node.getAspects();
- for (AspectDefinition defaultAspectDef : defaultAspectDefs)
- {
- invokeBeforeAddAspect(nodeRef, defaultAspectDef.getName());
- nodeAspects.add(defaultAspectDef.getName());
- addDefaultPropertyValues(defaultAspectDef, properties);
- invokeOnAddAspect(nodeRef, defaultAspectDef.getName());
-
- // Now add any default aspects for this aspect
- addDefaultAspects(defaultAspectDef, node, properties);
- }
- }
-
- /**
- * Drops the old primary association and creates a new one
- */
- public ChildAssociationRef moveNode(
- NodeRef nodeToMoveRef,
- NodeRef newParentRef,
- QName assocTypeQName,
- QName assocQName)
- throws InvalidNodeRefException
- {
- Assert.notNull(nodeToMoveRef);
- Assert.notNull(newParentRef);
- Assert.notNull(assocTypeQName);
- Assert.notNull(assocQName);
-
- // check the node references
- Node nodeToMove = getNodeNotNull(nodeToMoveRef);
- Node newParentNode = getNodeNotNull(newParentRef);
- // get the primary parent assoc
- ChildAssoc oldAssoc = nodeDaoService.getPrimaryParentAssoc(nodeToMove);
- ChildAssociationRef oldAssocRef = oldAssoc.getChildAssocRef();
- // get the old parent
- Node oldParentNode = oldAssoc.getParent();
-
- boolean movingStore = !nodeToMoveRef.getStoreRef().equals(newParentRef.getStoreRef());
-
- // data needed for policy invocation
- QName nodeToMoveTypeQName = nodeToMove.getTypeQName();
- Set nodeToMoveAspects = nodeToMove.getAspects();
-
- // Invoke policy behaviour
- if (movingStore)
- {
- invokeBeforeDeleteNode(nodeToMoveRef);
- invokeBeforeCreateNode(newParentRef, assocTypeQName, assocQName, nodeToMoveTypeQName);
- }
- else
- {
- invokeBeforeDeleteChildAssociation(oldAssocRef);
- invokeBeforeCreateChildAssociation(newParentRef, nodeToMoveRef, assocTypeQName, assocQName);
- invokeBeforeUpdateNode(oldParentNode.getNodeRef()); // old parent will be updated
- invokeBeforeUpdateNode(newParentRef); // new parent ditto
- }
-
- // remove the child assoc from the old parent
- // don't cascade as we will still need the node afterwards
- nodeDaoService.deleteChildAssoc(oldAssoc, false);
-
- // create a new assoc
- ChildAssoc newAssoc = nodeDaoService.newChildAssoc(
- newParentNode,
- nodeToMove,
- true,
- assocTypeQName,
- assocQName);
- setChildUniqueName(nodeToMove); // ensure uniqueness
- ChildAssociationRef newAssocRef = newAssoc.getChildAssocRef();
-
- // If the node is moving stores, then drag the node hierarchy with it
- if (movingStore)
- {
- // do the move
- Store newStore = newParentNode.getStore();
- moveNodeToStore(nodeToMove, newStore);
- // the node reference will have changed too
- nodeToMoveRef = nodeToMove.getNodeRef();
- }
-
- // check that no cyclic relationships have been created
- getPaths(nodeToMoveRef, false);
-
- // invoke policy behaviour
- if (movingStore)
- {
- // TODO for now indicate that the node has been archived to prevent the version history from being removed
- // in the future an onMove policy could be added, removing the need for onDelete and onCreate to be fired here
- invokeOnDeleteNode(oldAssocRef, nodeToMoveTypeQName, nodeToMoveAspects, true);
- invokeOnCreateNode(newAssoc.getChildAssocRef());
- }
- else
- {
- invokeOnCreateChildAssociation(newAssoc.getChildAssocRef());
- invokeOnDeleteChildAssociation(oldAssoc.getChildAssocRef());
- invokeOnUpdateNode(oldParentNode.getNodeRef());
- invokeOnUpdateNode(newParentRef);
- }
- invokeOnMoveNode(oldAssocRef, newAssocRef);
-
- // update the node status
- nodeDaoService.recordChangeId(nodeToMoveRef);
-
- // done
- return newAssoc.getChildAssocRef();
- }
-
- public void setChildAssociationIndex(ChildAssociationRef childAssocRef, int index)
- {
- // get nodes
- Node parentNode = getNodeNotNull(childAssocRef.getParentRef());
- Node childNode = getNodeNotNull(childAssocRef.getChildRef());
-
- ChildAssoc assoc = nodeDaoService.getChildAssoc(
- parentNode,
- childNode,
- childAssocRef.getTypeQName(),
- childAssocRef.getQName());
- if (assoc == null)
- {
- throw new InvalidChildAssociationRefException("Unable to set child association index: \n" +
- " assoc: " + childAssocRef + "\n" +
- " index: " + index,
- childAssocRef);
- }
- // set the index
- assoc.setIndex(index);
- }
-
- public QName getType(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
- return node.getTypeQName();
- }
-
- /**
- * @see org.alfresco.service.cmr.repository.NodeService#setType(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName)
- */
- public void setType(NodeRef nodeRef, QName typeQName) throws InvalidNodeRefException
- {
- // check the node type
- TypeDefinition nodeTypeDef = dictionaryService.getType(typeQName);
- if (nodeTypeDef == null)
- {
- throw new InvalidTypeException(typeQName);
- }
-
- // Invoke policies
- invokeBeforeUpdateNode(nodeRef);
-
- // Get the node and set the new type
- Node node = getNodeNotNull(nodeRef);
- node.setTypeQName(typeQName);
-
- // Add the default aspects to the node (update the properties with any new default values)
- Map properties = this.getPropertiesImpl(node);
- addDefaultAspects(nodeTypeDef, node, properties);
- this.setProperties(nodeRef, properties);
-
- // Invoke policies
- invokeOnUpdateNode(nodeRef);
- }
-
- /**
- * @see Node#getAspects()
- */
- public void addAspect(
- NodeRef nodeRef,
- QName aspectTypeQName,
- Map aspectProperties)
- throws InvalidNodeRefException, InvalidAspectException
- {
- // check that the aspect is legal
- AspectDefinition aspectDef = dictionaryService.getAspect(aspectTypeQName);
- if (aspectDef == null)
- {
- throw new InvalidAspectException("The aspect is invalid: " + aspectTypeQName, aspectTypeQName);
- }
-
- // Invoke policy behaviours
- invokeBeforeUpdateNode(nodeRef);
- invokeBeforeAddAspect(nodeRef, aspectTypeQName);
-
- Node node = getNodeNotNull(nodeRef);
-
- // attach the properties to the current node properties
- Map nodeProperties = getPropertiesImpl(node);
-
- if (aspectProperties != null)
- {
- nodeProperties.putAll(aspectProperties);
- }
-
- // Set any default property values that appear on the aspect
- addDefaultPropertyValues(aspectDef, nodeProperties);
-
- // Add any dependent aspects
- addDefaultAspects(aspectDef, node, nodeProperties);
-
- // Set the property values back on the node
- setProperties(nodeRef, nodeProperties);
-
- // physically attach the aspect to the node
- if (node.getAspects().add(aspectTypeQName) == true)
- {
- // Invoke policy behaviours
- invokeOnUpdateNode(nodeRef);
- invokeOnAddAspect(nodeRef, aspectTypeQName);
-
- // update the node status
- nodeDaoService.recordChangeId(nodeRef);
- }
- }
-
- /**
- * @see Node#getAspects()
- */
- public void removeAspect(NodeRef nodeRef, QName aspectTypeQName)
- throws InvalidNodeRefException, InvalidAspectException
- {
- // Invoke policy behaviours
- invokeBeforeUpdateNode(nodeRef);
- invokeBeforeRemoveAspect(nodeRef, aspectTypeQName);
-
- // get the aspect
- AspectDefinition aspectDef = dictionaryService.getAspect(aspectTypeQName);
- if (aspectDef == null)
- {
- throw new InvalidAspectException(aspectTypeQName);
- }
- // get the node
- Node node = getNodeNotNull(nodeRef);
-
- // remove the aspect, if present
- boolean removed = node.getAspects().remove(aspectTypeQName);
- // if the aspect was present, remove the associated properties
- if (removed)
- {
- Map nodeProperties = node.getProperties();
- Map propertyDefs = aspectDef.getProperties();
- for (QName propertyName : propertyDefs.keySet())
- {
- nodeProperties.remove(propertyName);
- }
-
- // Invoke policy behaviours
- invokeOnUpdateNode(nodeRef);
- invokeOnRemoveAspect(nodeRef, aspectTypeQName);
-
- // update the node status
- nodeDaoService.recordChangeId(nodeRef);
- }
- }
-
- /**
- * Performs a check on the set of node aspects
- *
- * @see Node#getAspects()
- */
- public boolean hasAspect(NodeRef nodeRef, QName aspectRef) throws InvalidNodeRefException, InvalidAspectException
- {
- Node node = getNodeNotNull(nodeRef);
- Set aspectQNames = node.getAspects();
- boolean hasAspect = aspectQNames.contains(aspectRef);
- // done
- return hasAspect;
- }
-
- public Set getAspects(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
- Set aspectQNames = node.getAspects();
- // copy the set to ensure initialization
- Set ret = new HashSet(aspectQNames.size());
- ret.addAll(aspectQNames);
- // done
- return ret;
- }
-
- public void deleteNode(NodeRef nodeRef)
- {
- boolean isArchivedNode = false;
- boolean requiresDelete = false;
-
- // Invoke policy behaviours
- invokeBeforeDeleteNode(nodeRef);
-
- // get the node
- Node node = getNodeNotNull(nodeRef);
- // get the primary parent-child relationship before it is gone
- ChildAssociationRef childAssocRef = getPrimaryParent(nodeRef);
- // get type and aspect QNames as they will be unavailable after the delete
- QName nodeTypeQName = node.getTypeQName();
- Set nodeAspectQNames = node.getAspects();
-
- // check if we need to archive the node
- StoreRef archiveStoreRef = null;
- if (nodeAspectQNames.contains(ContentModel.ASPECT_TEMPORARY))
- {
- // the node has the temporary aspect, meaning
- // it cannot be archived
- requiresDelete = true;
- isArchivedNode = false;
- }
- else
- {
- StoreRef storeRef = nodeRef.getStoreRef();
- archiveStoreRef = storeArchiveMap.getArchiveMap().get(storeRef);
- // get the type and check if we need archiving
- TypeDefinition typeDef = dictionaryService.getType(node.getTypeQName());
- if (typeDef == null || !typeDef.isArchive() || archiveStoreRef == null)
- {
- requiresDelete = true;
- }
- }
-
- if (requiresDelete)
- {
- // perform a normal deletion
- nodeDaoService.deleteNode(node, true);
- isArchivedNode = false;
- }
- else
- {
- // archive it
- archiveNode(nodeRef, archiveStoreRef);
- isArchivedNode = true;
- }
-
- // Invoke policy behaviours
- invokeOnDeleteNode(childAssocRef, nodeTypeQName, nodeAspectQNames, isArchivedNode);
- }
-
- public ChildAssociationRef addChild(NodeRef parentRef, NodeRef childRef, QName assocTypeQName, QName assocQName)
- {
- // Invoke policy behaviours
- invokeBeforeUpdateNode(parentRef);
- invokeBeforeCreateChildAssociation(parentRef, childRef, assocTypeQName, assocQName);
-
- // get the parent node and ensure that it is a container node
- Node parentNode = getNodeNotNull(parentRef);
- // get the child node
- Node childNode = getNodeNotNull(childRef);
- // make the association
- ChildAssoc assoc = nodeDaoService.newChildAssoc(
- parentNode,
- childNode,
- false,
- assocTypeQName,
- assocQName);
- // ensure name uniqueness
- setChildUniqueName(childNode);
- ChildAssociationRef assocRef = assoc.getChildAssocRef();
- NodeRef childNodeRef = assocRef.getChildRef();
-
- // check that the addition of the child has not created a cyclic relationship
- // this functionality is provided for free in getPaths
- getPaths(childNodeRef, false);
-
- // Invoke policy behaviours
- invokeOnCreateChildAssociation(assocRef);
- invokeOnUpdateNode(parentRef);
-
- return assoc.getChildAssocRef();
- }
-
- public void removeChild(NodeRef parentRef, NodeRef childRef) throws InvalidNodeRefException
- {
- Node parentNode = getNodeNotNull(parentRef);
- Node childNode = getNodeNotNull(childRef);
- Long childNodeId = childNode.getId();
-
- // get all the child assocs
- ChildAssociationRef primaryAssocRef = null;
- Collection assocs = nodeDaoService.getChildAssocs(parentNode);
- assocs = new HashSet(assocs); // copy set as we will be modifying it
- for (ChildAssoc assoc : assocs)
- {
- if (!assoc.getChild().getId().equals(childNodeId))
- {
- continue; // not a matching association
- }
- ChildAssociationRef assocRef = assoc.getChildAssocRef();
- // Is this a primary association?
- if (assoc.getIsPrimary())
- {
- // keep the primary association for last
- primaryAssocRef = assocRef;
- }
- else
- {
- // delete the association instance - it is not primary
- invokeBeforeDeleteChildAssociation(assocRef);
- nodeDaoService.deleteChildAssoc(assoc, true); // cascade
- invokeOnDeleteChildAssociation(assocRef);
- }
- }
- // remove the child if the primary association was a match
- if (primaryAssocRef != null)
- {
- deleteNode(primaryAssocRef.getChildRef());
- }
-
- // Invoke policy behaviours
- invokeOnUpdateNode(parentRef);
-
- // done
- }
-
- public Map getProperties(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
- return getPropertiesImpl(node);
- }
-
- private Map getPropertiesImpl(Node node) throws InvalidNodeRefException
- {
- NodeRef nodeRef = node.getNodeRef();
-
- Map nodeProperties = node.getProperties();
- Map ret = new HashMap(nodeProperties.size());
- // copy values
- for (Map.Entry entry: nodeProperties.entrySet())
- {
- QName propertyQName = entry.getKey();
- PropertyValue propertyValue = entry.getValue();
- // get the property definition
- PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
- // convert to the correct type
- Serializable value = makeSerializableValue(propertyDef, propertyValue);
- // copy across
- ret.put(propertyQName, value);
- }
- // spoof referencable properties
- addReferencableProperties(nodeRef, node.getId(), ret);
- // done
- return ret;
- }
-
- public Serializable getProperty(NodeRef nodeRef, QName qname) throws InvalidNodeRefException
- {
- // spoof referencable properties
- if (qname.equals(ContentModel.PROP_STORE_PROTOCOL))
- {
- return nodeRef.getStoreRef().getProtocol();
- }
- else if (qname.equals(ContentModel.PROP_STORE_IDENTIFIER))
- {
- return nodeRef.getStoreRef().getIdentifier();
- }
- else if (qname.equals(ContentModel.PROP_NODE_UUID))
- {
- return nodeRef.getId();
- }
-
- // get the property from the node
- Node node = getNodeNotNull(nodeRef);
-
- if (qname.equals(ContentModel.PROP_NODE_DBID))
- {
- return node.getId();
- }
-
- Map properties = node.getProperties();
- PropertyValue propertyValue = properties.get(qname);
-
- // get the property definition
- PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
- // convert to the correct type
- Serializable value = makeSerializableValue(propertyDef, propertyValue);
- // done
- return value;
- }
-
- /**
- * Ensures that all required properties are present on the node and copies the
- * property values to the Node.
- *
- * To remove a property, remove it from the map before calling this method.
- * Null-valued properties are allowed.
- *
- * If any of the values are null, a marker object is put in to mimic nulls. They will be turned back into
- * real nulls when the properties are requested again.
- *
- * @see Node#getProperties()
- */
- public void setProperties(NodeRef nodeRef, Map properties) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
-
- // Invoke policy behaviours
- invokeBeforeUpdateNode(nodeRef);
-
- // Do the set properties
- Map propertiesBefore = getPropertiesImpl(node);
- Map propertiesAfter = setPropertiesImpl(node, properties);
-
- setChildUniqueName(node); // ensure uniqueness
-
- // Invoke policy behaviours
- invokeOnUpdateNode(nodeRef);
- invokeOnUpdateProperties(nodeRef, propertiesBefore, propertiesAfter);
- }
-
- /**
- * Does the work of setting the property values. Returns a map containing the state of the properties after the set
- * operation is complete.
- *
- * @param node the node
- * @param properties the map of property values
- * @return the map of property values after the set operation is complete
- * @throws InvalidNodeRefException
- */
- private Map setPropertiesImpl(Node node, Map properties) throws InvalidNodeRefException
- {
- ParameterCheck.mandatory("properties", properties);
-
- // remove referencable properties
- removeReferencableProperties(properties);
-
- // copy properties onto node
- Map nodeProperties = node.getProperties();
- nodeProperties.clear();
-
- // check the property type and copy the values across
- for (QName propertyQName : properties.keySet())
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
- Serializable value = properties.get(propertyQName);
- // get a persistable value
- PropertyValue propertyValue = makePropertyValue(propertyDef, value);
- nodeProperties.put(propertyQName, propertyValue);
- }
-
- // update the node status
- NodeRef nodeRef = node.getNodeRef();
- nodeDaoService.recordChangeId(nodeRef);
-
- // Return the properties after
- return Collections.unmodifiableMap(properties);
- }
-
- /**
- * Gets the properties map, sets the value (null is allowed) and checks that the new set
- * of properties is valid.
- *
- * @see DbNodeServiceImpl.NullPropertyValue
- */
- public void setProperty(NodeRef nodeRef, QName qname, Serializable value) throws InvalidNodeRefException
- {
- Assert.notNull(qname);
-
- // Invoke policy behaviours
- invokeBeforeUpdateNode(nodeRef);
-
- // get the node
- Node node = getNodeNotNull(nodeRef);
-
- // Do the set operation
- Map propertiesBefore = getPropertiesImpl(node);
- Map propertiesAfter = setPropertyImpl(node, qname, value);
-
- if (qname.equals(ContentModel.PROP_NAME))
- {
- setChildUniqueName(node); // ensure uniqueness
- }
-
- // Invoke policy behaviours
- invokeOnUpdateNode(nodeRef);
- invokeOnUpdateProperties(nodeRef, propertiesBefore, propertiesAfter);
- }
-
- /**
- * Does the work of setting a property value. Returns the values of the properties after the set operation is
- * complete.
- *
- * @param node the node
- * @param qname the qname of the property
- * @param value the value of the property
- * @return the values of the properties after the set operation is complete
- * @throws InvalidNodeRefException
- */
- public Map setPropertyImpl(Node node, QName qname, Serializable value) throws InvalidNodeRefException
- {
- NodeRef nodeRef = node.getNodeRef();
-
- Map properties = node.getProperties();
- PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
- // get a persistable value
- PropertyValue propertyValue = makePropertyValue(propertyDef, value);
- properties.put(qname, propertyValue);
-
- // update the node status
- nodeDaoService.recordChangeId(nodeRef);
-
- return getPropertiesImpl(node);
- }
-
- /**
- * Transforms {@link Node#getParentAssocs()} to a new collection
- */
- public Collection getParents(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
- // get the assocs pointing to it
- Collection parentAssocs = node.getParentAssocs();
- // list of results
- Collection results = new ArrayList(parentAssocs.size());
- for (ChildAssoc assoc : parentAssocs)
- {
- // get the parent
- Node parentNode = assoc.getParent();
- results.add(parentNode.getNodeRef());
- }
- // done
- return results;
- }
-
- /**
- * Filters out any associations whose qname does not match the given pattern.
- */
- public List getParentAssocs(NodeRef nodeRef, QNamePattern typeQNamePattern, QNamePattern qnamePattern)
- {
- Node node = getNodeNotNull(nodeRef);
- // get the assocs pointing to it
- Collection parentAssocs = node.getParentAssocs();
- // shortcut if there are no assocs
- if (parentAssocs.size() == 0)
- {
- return Collections.emptyList();
- }
- // list of results
- List results = new ArrayList(parentAssocs.size());
- for (ChildAssoc assoc : parentAssocs)
- {
- // does the qname match the pattern?
- if (!qnamePattern.isMatch(assoc.getQname()) || !typeQNamePattern.isMatch(assoc.getTypeQName()))
- {
- // no match - ignore
- continue;
- }
- results.add(assoc.getChildAssocRef());
- }
- // done
- return results;
- }
-
- /**
- * Filters out any associations whose qname does not match the given pattern.
- */
- public List getChildAssocs(NodeRef nodeRef, QNamePattern typeQNamePattern, QNamePattern qnamePattern)
- {
- Node node = getNodeNotNull(nodeRef);
- // get the assocs pointing from it
- Collection childAssocRefs = nodeDaoService.getChildAssocRefs(node);
- // shortcut if there are no assocs
- if (childAssocRefs.size() == 0)
- {
- return Collections.emptyList();
- }
- // sort results
- ArrayList orderedList = new ArrayList(childAssocRefs);
- Collections.sort(orderedList);
-
- // list of results
- int nthSibling = 0;
- Iterator iterator = orderedList.iterator();
- while(iterator.hasNext())
- {
- ChildAssociationRef childAssocRef = iterator.next();
- // does the qname match the pattern?
- if (!qnamePattern.isMatch(childAssocRef.getQName()) || !typeQNamePattern.isMatch(childAssocRef.getTypeQName()))
- {
- // no match - remove
- iterator.remove();
- }
- else
- {
- childAssocRef.setNthSibling(nthSibling);
- nthSibling++;
- }
- }
- // done
- return orderedList;
- }
-
- public NodeRef getChildByName(NodeRef nodeRef, QName assocTypeQName, String childName)
- {
- Node node = getNodeNotNull(nodeRef);
- ChildAssoc childAssoc = nodeDaoService.getChildAssoc(node, assocTypeQName, childName);
- if (childAssoc != null)
- {
- return childAssoc.getChild().getNodeRef();
- }
- else
- {
- return null;
- }
- }
-
- public ChildAssociationRef getPrimaryParent(NodeRef nodeRef) throws InvalidNodeRefException
- {
- Node node = getNodeNotNull(nodeRef);
- // get the primary parent assoc
- ChildAssoc assoc = nodeDaoService.getPrimaryParentAssoc(node);
-
- // done - the assoc may be null for a root node
- ChildAssociationRef assocRef = null;
- if (assoc == null)
- {
- assocRef = new ChildAssociationRef(null, null, null, nodeRef);
- }
- else
- {
- assocRef = assoc.getChildAssocRef();
- }
- return assocRef;
- }
-
- public AssociationRef createAssociation(NodeRef sourceRef, NodeRef targetRef, QName assocTypeQName)
- throws InvalidNodeRefException, AssociationExistsException
- {
- // Invoke policy behaviours
- invokeBeforeUpdateNode(sourceRef);
-
- Node sourceNode = getNodeNotNull(sourceRef);
- Node targetNode = getNodeNotNull(targetRef);
- // see if it exists
- NodeAssoc assoc = nodeDaoService.getNodeAssoc(sourceNode, targetNode, assocTypeQName);
- if (assoc != null)
- {
- throw new AssociationExistsException(sourceRef, targetRef, assocTypeQName);
- }
- // we are sure that the association doesn't exist - make it
- assoc = nodeDaoService.newNodeAssoc(sourceNode, targetNode, assocTypeQName);
- AssociationRef assocRef = assoc.getNodeAssocRef();
-
- // Invoke policy behaviours
- invokeOnUpdateNode(sourceRef);
- invokeOnCreateAssociation(assocRef);
-
- return assocRef;
- }
-
- public void removeAssociation(NodeRef sourceRef, NodeRef targetRef, QName assocTypeQName)
- throws InvalidNodeRefException
- {
- Node sourceNode = getNodeNotNull(sourceRef);
- Node targetNode = getNodeNotNull(targetRef);
- // get the association
- NodeAssoc assoc = nodeDaoService.getNodeAssoc(sourceNode, targetNode, assocTypeQName);
- if (assoc == null)
- {
- // nothing to remove
- return;
- }
- AssociationRef assocRef = assoc.getNodeAssocRef();
-
- // Invoke policy behaviours
- invokeBeforeUpdateNode(sourceRef);
-
- // delete it
- nodeDaoService.deleteNodeAssoc(assoc);
-
- // Invoke policy behaviours
- invokeOnUpdateNode(sourceRef);
- invokeOnDeleteAssociation(assocRef);
- }
-
- public List getTargetAssocs(NodeRef sourceRef, QNamePattern qnamePattern)
- {
- Node sourceNode = getNodeNotNull(sourceRef);
- // get all assocs to target
- Collection assocs = nodeDaoService.getTargetNodeAssocs(sourceNode);
- List nodeAssocRefs = new ArrayList(assocs.size());
- for (NodeAssoc assoc : assocs)
- {
- // check qname pattern
- if (!qnamePattern.isMatch(assoc.getTypeQName()))
- {
- continue; // the assoc name doesn't match the pattern given
- }
- nodeAssocRefs.add(assoc.getNodeAssocRef());
- }
- // done
- return nodeAssocRefs;
- }
-
- public List getSourceAssocs(NodeRef targetRef, QNamePattern qnamePattern)
- {
- Node targetNode = getNodeNotNull(targetRef);
- // get all assocs to source
- Collection assocs = nodeDaoService.getSourceNodeAssocs(targetNode);
- List nodeAssocRefs = new ArrayList(assocs.size());
- for (NodeAssoc assoc : assocs)
- {
- // check qname pattern
- if (!qnamePattern.isMatch(assoc.getTypeQName()))
- {
- continue; // the assoc name doesn't match the pattern given
- }
- nodeAssocRefs.add(assoc.getNodeAssocRef());
- }
- // done
- return nodeAssocRefs;
- }
-
- /**
- * Recursive method used to build up paths from a given node to the root.
- *
- * Whilst walking up the hierarchy to the root, some nodes may have a root aspect.
- * Every time one of these is encountered, a new path is farmed off, but the method
- * continues to walk up the hierarchy.
- *
- * @param currentNode the node to start from, i.e. the child node to work upwards from
- * @param currentPath the path from the current node to the descendent that we started from
- * @param completedPaths paths that have reached the root are added to this collection
- * @param assocStack the parent-child relationships traversed whilst building the path.
- * Used to detect cyclic relationships.
- * @param primaryOnly true if only the primary parent association must be traversed.
- * If this is true, then the only root is the top level node having no parents.
- * @throws CyclicChildRelationshipException
- */
- private void prependPaths(
- final Node currentNode,
- final Path currentPath,
- Collection completedPaths,
- Stack assocStack,
- boolean primaryOnly)
- throws CyclicChildRelationshipException
- {
- NodeRef currentNodeRef = currentNode.getNodeRef();
- // get the parent associations of the given node
- Collection parentAssocs = currentNode.getParentAssocs();
- // does the node have parents
- boolean hasParents = parentAssocs.size() > 0;
- // does the current node have a root aspect?
- boolean isRoot = hasAspect(currentNodeRef, ContentModel.ASPECT_ROOT);
- boolean isStoreRoot = currentNode.getTypeQName().equals(ContentModel.TYPE_STOREROOT);
-
- // look for a root. If we only want the primary root, then ignore all but the top-level root.
- if (isRoot && !(primaryOnly && hasParents)) // exclude primary search with parents present
- {
- // create a one-sided assoc ref for the root node and prepend to the stack
- // this effectively spoofs the fact that the current node is not below the root
- // - we put this assoc in as the first assoc in the path must be a one-sided
- // reference pointing to the root node
- ChildAssociationRef assocRef = new ChildAssociationRef(
- null,
- null,
- null,
- getRootNode(currentNode.getNodeRef().getStoreRef()));
- // create a path to save and add the 'root' assoc
- Path pathToSave = new Path();
- Path.ChildAssocElement first = null;
- for (Path.Element element: currentPath)
- {
- if (first == null)
- {
- first = (Path.ChildAssocElement) element;
- }
- else
- {
- pathToSave.append(element);
- }
- }
- if (first != null)
- {
- // mimic the association that would appear if the current node were below
- // the root node; if the node is actually directly beneath the root node,
- // this produces the real association
- ChildAssociationRef updateAssocRef = new ChildAssociationRef(
- isStoreRoot ? ContentModel.ASSOC_CHILDREN : first.getRef().getTypeQName(),
- getRootNode(currentNode.getNodeRef().getStoreRef()),
- first.getRef().getQName(),
- first.getRef().getChildRef());
- Path.Element newFirst = new Path.ChildAssocElement(updateAssocRef);
- pathToSave.prepend(newFirst);
- }
-
- Path.Element element = new Path.ChildAssocElement(assocRef);
- pathToSave.prepend(element);
-
- // store the path just built
- completedPaths.add(pathToSave);
- }
-
- if (parentAssocs.size() == 0 && !isRoot)
- {
- throw new RuntimeException("Node without parents does not have root aspect: " +
- currentNodeRef);
- }
- // walk up each parent association
- for (ChildAssoc assoc : parentAssocs)
- {
- // does the association already exist in the stack
- if (assocStack.contains(assoc))
- {
- // the association was present already
- throw new CyclicChildRelationshipException(
- "Cyclic parent-child relationship detected: \n" +
- " current node: " + currentNode + "\n" +
- " current path: " + currentPath + "\n" +
- " next assoc: " + assoc,
- assoc);
- }
- // do we consider only primary assocs?
- if (primaryOnly && !assoc.getIsPrimary())
- {
- continue;
- }
- // build a path element
- NodeRef parentRef = assoc.getParent().getNodeRef();
- QName qname = assoc.getQname();
- NodeRef childRef = assoc.getChild().getNodeRef();
- boolean isPrimary = assoc.getIsPrimary();
- // build a real association reference
- ChildAssociationRef assocRef = new ChildAssociationRef(assoc.getTypeQName(), parentRef, qname, childRef, isPrimary, -1);
- // Ordering is not important here: We are building distinct paths upwards
- Path.Element element = new Path.ChildAssocElement(assocRef);
- // create a new path that builds on the current path
- Path path = new Path();
- path.append(currentPath);
- // prepend element
- path.prepend(element);
- // get parent node
- Node parentNode = assoc.getParent();
-
- // push the assoc stack, recurse and pop
- assocStack.push(assoc);
- prependPaths(parentNode, path, completedPaths, assocStack, primaryOnly);
- assocStack.pop();
- }
- // done
- }
-
- /**
- * @see #getPaths(NodeRef, boolean)
- * @see #prependPaths(Node, Path, Collection, Stack, boolean)
- */
- public Path getPath(NodeRef nodeRef) throws InvalidNodeRefException
- {
- List paths = getPaths(nodeRef, true); // checks primary path count
- if (paths.size() == 1)
- {
- return paths.get(0); // we know there is only one
- }
- throw new RuntimeException("Primary path count not checked"); // checked by getPaths()
- }
-
- /**
- * When searching for primaryOnly == true, checks that there is exactly
- * one path.
- * @see #prependPaths(Node, Path, Collection, Stack, boolean)
- */
- public List getPaths(NodeRef nodeRef, boolean primaryOnly) throws InvalidNodeRefException
- {
- // get the starting node
- Node node = getNodeNotNull(nodeRef);
- // create storage for the paths - only need 1 bucket if we are looking for the primary path
- List paths = new ArrayList(primaryOnly ? 1 : 10);
- // create an empty current path to start from
- Path currentPath = new Path();
- // create storage for touched associations
- Stack assocStack = new Stack();
- // call recursive method to sort it out
- prependPaths(node, currentPath, paths, assocStack, primaryOnly);
-
- // check that for the primary only case we have exactly one path
- if (primaryOnly && paths.size() != 1)
- {
- throw new RuntimeException("Node has " + paths.size() + " primary paths: " + nodeRef);
- }
-
- // done
- if (loggerPaths.isDebugEnabled())
- {
- StringBuilder sb = new StringBuilder(256);
- if (primaryOnly)
- {
- sb.append("Primary paths");
- }
- else
- {
- sb.append("Paths");
- }
- sb.append(" for node ").append(nodeRef);
- for (Path path : paths)
- {
- sb.append("\n").append(" ").append(path);
- }
- loggerPaths.debug(sb);
- }
- return paths;
- }
-
- private void archiveNode(NodeRef nodeRef, StoreRef archiveStoreRef)
- {
- Node node = getNodeNotNull(nodeRef);
- ChildAssoc primaryParentAssoc = nodeDaoService.getPrimaryParentAssoc(node);
-
- // add the aspect
- Set aspects = node.getAspects();
- aspects.add(ContentModel.ASPECT_ARCHIVED);
- Map properties = node.getProperties();
- PropertyValue archivedByProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_BY),
- AuthenticationUtil.getCurrentUserName());
- properties.put(ContentModel.PROP_ARCHIVED_BY, archivedByProperty);
- PropertyValue archivedDateProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_DATE),
- new Date());
- properties.put(ContentModel.PROP_ARCHIVED_DATE, archivedDateProperty);
- PropertyValue archivedPrimaryParentNodeRefProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
- primaryParentAssoc.getChildAssocRef());
- properties.put(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC, archivedPrimaryParentNodeRefProperty);
- PropertyValue originalOwnerProperty = properties.get(ContentModel.PROP_OWNER);
- PropertyValue originalCreatorProperty = properties.get(ContentModel.PROP_CREATOR);
- if (originalOwnerProperty != null || originalCreatorProperty != null)
- {
- properties.put(
- ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER,
- originalOwnerProperty != null ? originalOwnerProperty : originalCreatorProperty);
- }
-
- // change the node ownership
- aspects.add(ContentModel.ASPECT_OWNABLE);
- PropertyValue newOwnerProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER),
- AuthenticationUtil.getCurrentUserName());
- properties.put(ContentModel.PROP_OWNER, newOwnerProperty);
-
- // move the node
- NodeRef archiveStoreRootNodeRef = getRootNode(archiveStoreRef);
- moveNode(
- nodeRef,
- archiveStoreRootNodeRef,
- ContentModel.ASSOC_CHILDREN,
- QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "archivedItem"));
-
- // get the IDs of all the node's primary children, including its own
- Map nodesById = getNodeHierarchy(node, null);
-
- // Archive all the associations between the archived nodes and non-archived nodes
- for (Node nodeToArchive : nodesById.values())
- {
- archiveAssocs(nodeToArchive, nodesById);
- }
-
- // the node reference has changed due to the store move
- nodeRef = node.getNodeRef();
- }
-
- /**
- * Performs all the necessary housekeeping involved in changing a node's store.
- * This method cascades down through all the primary children of the node as
- * well.
- *
- * @param node the node whose store is changing
- * @param store the new store for the node
- */
- private void moveNodeToStore(Node node, Store store)
- {
- // get the IDs of all the node's primary children, including its own
- Map nodesById = getNodeHierarchy(node, null);
-
- // move each node into the archive store
- for (Node nodeToMove : nodesById.values())
- {
- NodeRef oldNodeRef = nodeToMove.getNodeRef();
- nodeToMove.setStore(store);
- NodeRef newNodeRef = nodeToMove.getNodeRef();
-
- String txnId = AlfrescoTransactionSupport.getTransactionId();
- // update old status
- NodeStatus oldNodeStatus = nodeDaoService.getNodeStatus(oldNodeRef, true);
- oldNodeStatus.setNode(null);
- oldNodeStatus.getTransaction().setChangeTxnId(txnId);
- // create the new status
- NodeStatus newNodeStatus = nodeDaoService.getNodeStatus(newNodeRef, true);
- newNodeStatus.setNode(nodeToMove);
- newNodeStatus.getTransaction().setChangeTxnId(txnId);
- }
- }
-
- /**
- * Fill the map of all primary children below the given node.
- * The given node will be added to the map and the method is recursive
- * to all primary children.
- *
- * @param node the start of the hierarchy
- * @param nodesById a map of nodes that will be reused as the return value
- * @return Returns a map of nodes in the hierarchy keyed by their IDs
- */
- private Map getNodeHierarchy(Node node, Map nodesById)
- {
- if (nodesById == null)
- {
- nodesById = new HashMap(23);
- }
-
- Long id = node.getId();
- if (nodesById.containsKey(id))
- {
- // this ID was already added - circular reference
- logger.warn("Circular hierarchy found including node " + id);
- return nodesById;
- }
- // add the node to the map
- nodesById.put(id, node);
- // recurse into the primary children
- Collection childAssocs = nodeDaoService.getChildAssocs(node);
- for (ChildAssoc childAssoc : childAssocs)
- {
- // cascade into primary associations
- if (childAssoc.getIsPrimary())
- {
- Node primaryChild = childAssoc.getChild();
- nodesById = getNodeHierarchy(primaryChild, nodesById);
- }
- }
- return nodesById;
- }
-
- /**
- * Archive all associations to and from the given node, with the
- * exception of associations to or from nodes in the given map.
- *
- * Primary parent associations are also ignored.
- *
- * @param node the node whose associations must be archived
- * @param nodesById a map of nodes partaking in the archival process
- */
- private void archiveAssocs(Node node, Map nodesById)
- {
- List childAssocsToDelete = new ArrayList(5);
- // child associations
- ArrayList archivedChildAssocRefs = new ArrayList(5);
- Collection childAssocs = nodeDaoService.getChildAssocs(node);
- for (ChildAssoc assoc : childAssocs)
- {
- Long relatedNodeId = assoc.getChild().getId();
- if (nodesById.containsKey(relatedNodeId))
- {
- // a sibling in the archive process
- continue;
- }
- childAssocsToDelete.add(assoc);
- archivedChildAssocRefs.add(assoc.getChildAssocRef());
- }
- // parent associations
- ArrayList archivedParentAssocRefs = new ArrayList(5);
- for (ChildAssoc assoc : node.getParentAssocs())
- {
- Long relatedNodeId = assoc.getParent().getId();
- if (nodesById.containsKey(relatedNodeId))
- {
- // a sibling in the archive process
- continue;
- }
- else if (assoc.getIsPrimary())
- {
- // ignore the primary parent as this is handled more specifically
- continue;
- }
- childAssocsToDelete.add(assoc);
- archivedParentAssocRefs.add(assoc.getChildAssocRef());
- }
-
- List nodeAssocsToDelete = new ArrayList(5);
- // source associations
- ArrayList archivedSourceAssocRefs = new ArrayList(5);
- for (NodeAssoc assoc : nodeDaoService.getSourceNodeAssocs(node))
- {
- Long relatedNodeId = assoc.getSource().getId();
- if (nodesById.containsKey(relatedNodeId))
- {
- // a sibling in the archive process
- continue;
- }
- nodeAssocsToDelete.add(assoc);
- archivedSourceAssocRefs.add(assoc.getNodeAssocRef());
- }
- // target associations
- ArrayList archivedTargetAssocRefs = new ArrayList(5);
- for (NodeAssoc assoc : nodeDaoService.getTargetNodeAssocs(node))
- {
- Long relatedNodeId = assoc.getTarget().getId();
- if (nodesById.containsKey(relatedNodeId))
- {
- // a sibling in the archive process
- continue;
- }
- nodeAssocsToDelete.add(assoc);
- archivedTargetAssocRefs.add(assoc.getNodeAssocRef());
- }
- // delete child assocs
- for (ChildAssoc assoc : childAssocsToDelete)
- {
- nodeDaoService.deleteChildAssoc(assoc, false);
- }
- // delete node assocs
- for (NodeAssoc assoc : nodeAssocsToDelete)
- {
- nodeDaoService.deleteNodeAssoc(assoc);
- }
-
- // add archived aspect
- node.getAspects().add(ContentModel.ASPECT_ARCHIVED_ASSOCS);
- // set properties
- Map properties = node.getProperties();
-
- if (archivedParentAssocRefs.size() > 0)
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_PARENT_ASSOCS);
- PropertyValue propertyValue = makePropertyValue(propertyDef, archivedParentAssocRefs);
- properties.put(ContentModel.PROP_ARCHIVED_PARENT_ASSOCS, propertyValue);
- }
- if (archivedChildAssocRefs.size() > 0)
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_CHILD_ASSOCS);
- PropertyValue propertyValue = makePropertyValue(propertyDef, archivedChildAssocRefs);
- properties.put(ContentModel.PROP_ARCHIVED_CHILD_ASSOCS, propertyValue);
- }
- if (archivedSourceAssocRefs.size() > 0)
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS);
- PropertyValue propertyValue = makePropertyValue(propertyDef, archivedSourceAssocRefs);
- properties.put(ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS, propertyValue);
- }
- if (archivedTargetAssocRefs.size() > 0)
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_TARGET_ASSOCS);
- PropertyValue propertyValue = makePropertyValue(propertyDef, archivedTargetAssocRefs);
- properties.put(ContentModel.PROP_ARCHIVED_TARGET_ASSOCS, propertyValue);
- }
- }
-
- public NodeRef getStoreArchiveNode(StoreRef storeRef)
- {
- StoreRef archiveStoreRef = storeArchiveMap.getArchiveMap().get(storeRef);
- if (archiveStoreRef == null)
- {
- // no mapping for the given store
- return null;
- }
- else
- {
- return getRootNode(archiveStoreRef);
- }
- }
-
- public NodeRef restoreNode(NodeRef archivedNodeRef, NodeRef destinationParentNodeRef, QName assocTypeQName, QName assocQName)
- {
- Node archivedNode = getNodeNotNull(archivedNodeRef);
- Set aspects = archivedNode.getAspects();
- Map properties = archivedNode.getProperties();
- // the node must be a top-level archive node
- if (!aspects.contains(ContentModel.ASPECT_ARCHIVED))
- {
- throw new AlfrescoRuntimeException("The node to archive is not an archive node");
- }
- ChildAssociationRef originalPrimaryParentAssocRef = (ChildAssociationRef) makeSerializableValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
- properties.get(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC));
- PropertyValue originalOwnerProperty = properties.get(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
- // remove the archived aspect
- aspects.remove(ContentModel.ASPECT_ARCHIVED);
- properties.remove(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC);
- properties.remove(ContentModel.PROP_ARCHIVED_BY);
- properties.remove(ContentModel.PROP_ARCHIVED_DATE);
- properties.remove(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
-
- // restore the original ownership
- if (originalOwnerProperty != null)
- {
- aspects.add(ContentModel.ASPECT_OWNABLE);
- properties.put(ContentModel.PROP_OWNER, originalOwnerProperty);
- }
-
- if (destinationParentNodeRef == null)
- {
- // we must restore to the original location
- destinationParentNodeRef = originalPrimaryParentAssocRef.getParentRef();
- }
- // check the associations
- if (assocTypeQName == null)
- {
- assocTypeQName = originalPrimaryParentAssocRef.getTypeQName();
- }
- if (assocQName == null)
- {
- assocQName = originalPrimaryParentAssocRef.getQName();
- }
-
- // move the node to the target parent, which may or may not be the original parent
- moveNode(
- archivedNodeRef,
- destinationParentNodeRef,
- assocTypeQName,
- assocQName);
-
- // get the IDs of all the node's primary children, including its own
- Map restoredNodesById = getNodeHierarchy(archivedNode, null);
- // Restore the archived associations, if required
- for (Node restoredNode : restoredNodesById.values())
- {
- restoreAssocs(restoredNode);
- }
-
- // the node reference has changed due to the store move
- NodeRef restoredNodeRef = archivedNode.getNodeRef();
-
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug("Restored node: \n" +
- " original noderef: " + archivedNodeRef + "\n" +
- " restored noderef: " + restoredNodeRef + "\n" +
- " new parent: " + destinationParentNodeRef);
- }
- return restoredNodeRef;
- }
-
- private void restoreAssocs(Node node)
- {
- NodeRef nodeRef = node.getNodeRef();
- // set properties
- Map properties = node.getProperties();
-
- // restore parent associations
- Collection parentAssocRefs = (Collection) getProperty(
- nodeRef,
- ContentModel.PROP_ARCHIVED_PARENT_ASSOCS);
- if (parentAssocRefs != null)
- {
- for (ChildAssociationRef assocRef : parentAssocRefs)
- {
- NodeRef parentNodeRef = assocRef.getParentRef();
- if (!exists(parentNodeRef))
- {
- continue;
- }
- Node parentNode = getNodeNotNull(parentNodeRef);
- // get the name to use for the unique child check
- QName assocTypeQName = assocRef.getTypeQName();
- nodeDaoService.newChildAssoc(
- parentNode,
- node,
- assocRef.isPrimary(),
- assocTypeQName,
- assocRef.getQName());
- }
- properties.remove(ContentModel.PROP_ARCHIVED_PARENT_ASSOCS);
- }
-
- // make sure that the node name uniqueness is enforced
- setChildUniqueName(node);
-
- // restore child associations
- Collection childAssocRefs = (Collection) getProperty(
- nodeRef,
- ContentModel.PROP_ARCHIVED_CHILD_ASSOCS);
- if (childAssocRefs != null)
- {
- for (ChildAssociationRef assocRef : childAssocRefs)
- {
- NodeRef childNodeRef = assocRef.getChildRef();
- if (!exists(childNodeRef))
- {
- continue;
- }
- Node childNode = getNodeNotNull(childNodeRef);
- QName assocTypeQName = assocRef.getTypeQName();
- // get the name to use for the unique child check
- nodeDaoService.newChildAssoc(
- node,
- childNode,
- assocRef.isPrimary(),
- assocTypeQName,
- assocRef.getQName());
- // ensure that the name uniqueness is enforced for the child node
- setChildUniqueName(childNode);
- }
- properties.remove(ContentModel.PROP_ARCHIVED_CHILD_ASSOCS);
- }
- // restore source associations
- Collection sourceAssocRefs = (Collection) getProperty(
- nodeRef,
- ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS);
- if (sourceAssocRefs != null)
- {
- for (AssociationRef assocRef : sourceAssocRefs)
- {
- NodeRef sourceNodeRef = assocRef.getSourceRef();
- if (!exists(sourceNodeRef))
- {
- continue;
- }
- Node sourceNode = getNodeNotNull(sourceNodeRef);
- nodeDaoService.newNodeAssoc(sourceNode, node, assocRef.getTypeQName());
- }
- properties.remove(ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS);
- }
- // restore target associations
- Collection targetAssocRefs = (Collection) getProperty(
- nodeRef,
- ContentModel.PROP_ARCHIVED_TARGET_ASSOCS);
- if (targetAssocRefs != null)
- {
- for (AssociationRef assocRef : targetAssocRefs)
- {
- NodeRef targetNodeRef = assocRef.getTargetRef();
- if (!exists(targetNodeRef))
- {
- continue;
- }
- Node targetNode = getNodeNotNull(targetNodeRef);
- nodeDaoService.newNodeAssoc(node, targetNode, assocRef.getTypeQName());
- }
- properties.remove(ContentModel.PROP_ARCHIVED_TARGET_ASSOCS);
- }
- // remove the aspect
- node.getAspects().remove(ContentModel.ASPECT_ARCHIVED_ASSOCS);
- }
-
- /**
- * Checks the dictionary's definition of the association to assign a unique name to the child node.
- *
- * @param childNode the child node being added. The name will be extracted from it, if necessary.
- */
- private void setChildUniqueName(Node childNode)
- {
- // get the name property
- Map properties = childNode.getProperties();
- PropertyValue nameValue = properties.get(ContentModel.PROP_NAME);
- String useName = null;
- if (nameValue == null)
- {
- // no name has been assigned, so assign the ID of the child node
- useName = childNode.getUuid();
- }
- else
- {
- useName = (String) nameValue.getValue(DataTypeDefinition.TEXT);
- }
- // get all the parent assocs
- Collection parentAssocs = childNode.getParentAssocs();
- for (ChildAssoc assoc : parentAssocs)
- {
- QName assocTypeQName = assoc.getTypeQName();
- AssociationDefinition assocDef = dictionaryService.getAssociation(assocTypeQName);
- if (!assocDef.isChild())
- {
- throw new DataIntegrityViolationException("Child association has non-child type: " + assoc.getId());
- }
- ChildAssociationDefinition childAssocDef = (ChildAssociationDefinition) assocDef;
- if (childAssocDef.getDuplicateChildNamesAllowed())
- {
- // the name is irrelevant, so it doesn't need to be put into the unique key
- nodeDaoService.setChildNameUnique(assoc, null);
- }
- else
- {
- nodeDaoService.setChildNameUnique(assoc, useName);
- }
- }
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug(
- "Unique name set for all " + parentAssocs.size() + " parent associations: \n" +
- " name: " + useName);
- }
- }
-}
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.node.db;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.model.ContentModel;
+import org.alfresco.repo.domain.ChildAssoc;
+import org.alfresco.repo.domain.Node;
+import org.alfresco.repo.domain.NodeAssoc;
+import org.alfresco.repo.domain.NodeStatus;
+import org.alfresco.repo.domain.PropertyValue;
+import org.alfresco.repo.domain.Store;
+import org.alfresco.repo.node.AbstractNodeServiceImpl;
+import org.alfresco.repo.node.StoreArchiveMap;
+import org.alfresco.repo.security.authentication.AuthenticationUtil;
+import org.alfresco.service.cmr.dictionary.AspectDefinition;
+import org.alfresco.service.cmr.dictionary.AssociationDefinition;
+import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition;
+import org.alfresco.service.cmr.dictionary.ClassDefinition;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
+import org.alfresco.service.cmr.dictionary.InvalidAspectException;
+import org.alfresco.service.cmr.dictionary.InvalidTypeException;
+import org.alfresco.service.cmr.dictionary.PropertyDefinition;
+import org.alfresco.service.cmr.dictionary.TypeDefinition;
+import org.alfresco.service.cmr.repository.AssociationExistsException;
+import org.alfresco.service.cmr.repository.AssociationRef;
+import org.alfresco.service.cmr.repository.ChildAssociationRef;
+import org.alfresco.service.cmr.repository.CyclicChildRelationshipException;
+import org.alfresco.service.cmr.repository.InvalidChildAssociationRefException;
+import org.alfresco.service.cmr.repository.InvalidNodeRefException;
+import org.alfresco.service.cmr.repository.InvalidStoreRefException;
+import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.service.cmr.repository.NodeService;
+import org.alfresco.service.cmr.repository.Path;
+import org.alfresco.service.cmr.repository.StoreExistsException;
+import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.cmr.repository.NodeRef.Status;
+import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.namespace.QName;
+import org.alfresco.service.namespace.QNamePattern;
+import org.alfresco.util.ParameterCheck;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.dao.DataIntegrityViolationException;
+import org.springframework.util.Assert;
+
+/**
+ * Node service using database persistence layer to fulfill functionality
+ *
+ * @author Derek Hulley
+ */
+public class DbNodeServiceImpl extends AbstractNodeServiceImpl
+{
+ private static Log logger = LogFactory.getLog(DbNodeServiceImpl.class);
+ private static Log loggerPaths = LogFactory.getLog(DbNodeServiceImpl.class.getName() + ".paths");
+
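+ // Collaborators: the node persistence DAO, the map from live stores to their archive
+ // stores, and the AVM node service used to include AVM stores in getStores()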
+ private NodeDaoService nodeDaoService;
+ private StoreArchiveMap storeArchiveMap;
+ private NodeService avmNodeService;
+
+ public DbNodeServiceImpl()
+ {
+ storeArchiveMap = new StoreArchiveMap(); // in case it is not set
+ }
+
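+ // Setter injection points for the collaborators above, typically wired up by the Spring container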
+ public void setNodeDaoService(NodeDaoService nodeDaoService)
+ {
+ this.nodeDaoService = nodeDaoService;
+ }
+
+ public void setStoreArchiveMap(StoreArchiveMap storeArchiveMap)
+ {
+ this.storeArchiveMap = storeArchiveMap;
+ }
+
+ public void setAvmNodeService(NodeService avmNodeService)
+ {
+ this.avmNodeService = avmNodeService;
+ }
+
+ /**
+ * Performs a null-safe get of the node
+ *
+ * @param nodeRef the node to retrieve
+ * @return Returns the node entity (never null)
+ * @throws InvalidNodeRefException if the referenced node could not be found
+ */
+ private Node getNodeNotNull(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node unchecked = nodeDaoService.getNode(nodeRef);
+ if (unchecked == null)
+ {
+ throw new InvalidNodeRefException("Node does not exist: " + nodeRef, nodeRef);
+ }
+ return unchecked;
+ }
+
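+ /**
+ * Checks for the existence of the store by looking it up in the persistence layer
+ */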
+ public boolean exists(StoreRef storeRef)
+ {
+ Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
+ boolean exists = (store != null);
+ // done
+ return exists;
+ }
+
+ public boolean exists(NodeRef nodeRef)
+ {
+ Node node = nodeDaoService.getNode(nodeRef);
+ boolean exists = (node != null);
+ // done
+ return exists;
+ }
+
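+ /**
+ * Gets the node's status, including the ID of the transaction that last changed it.
+ * Returns null if the node never existed.
+ */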
+ public Status getNodeStatus(NodeRef nodeRef)
+ {
+ NodeStatus nodeStatus = nodeDaoService.getNodeStatus(nodeRef, false);
+ if (nodeStatus == null) // node never existed
+ {
+ return null;
+ }
+ else
+ {
+ return new NodeRef.Status(
+ nodeStatus.getTransaction().getChangeTxnId(),
+ nodeStatus.isDeleted());
+ }
+ }
+
+ /**
+ * @see NodeDaoService#getStores()
+ */
+ public List getStores()
+ {
+ List stores = nodeDaoService.getStores();
+ List storeRefs = new ArrayList(stores.size());
+ for (Store store : stores)
+ {
+ storeRefs.add(store.getStoreRef());
+ }
+ // Now get the AVMStores.
+ List avmStores = avmNodeService.getStores();
+ storeRefs.addAll(avmStores);
+ // Return them all.
+ return storeRefs;
+ }
+
+ /**
+ * Defers to the typed service
+ * @see StoreDaoService#createWorkspace(String)
+ */
+ public StoreRef createStore(String protocol, String identifier)
+ {
+ StoreRef storeRef = new StoreRef(protocol, identifier);
+ // check that the store does not already exist
+ Store store = nodeDaoService.getStore(protocol, identifier);
+ if (store != null)
+ {
+ throw new StoreExistsException("Unable to create a store that already exists: " + storeRef, storeRef);
+ }
+
+ // invoke policies
+ invokeBeforeCreateStore(ContentModel.TYPE_STOREROOT, storeRef);
+
+ // create a new one
+ store = nodeDaoService.createStore(protocol, identifier);
+ // get the root node
+ Node rootNode = store.getRootNode();
+ // assign the root aspect - this is expected of all roots, even store roots
+ addAspect(rootNode.getNodeRef(),
+ ContentModel.ASPECT_ROOT,
+ Collections.emptyMap());
+
+ // invoke policies
+ invokeOnCreateStore(rootNode.getNodeRef());
+
+ // done
+ if (!store.getStoreRef().equals(storeRef))
+ {
+ throw new RuntimeException("Incorrect store reference");
+ }
+ return storeRef;
+ }
+
+ public NodeRef getRootNode(StoreRef storeRef) throws InvalidStoreRefException
+ {
+ Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
+ if (store == null)
+ {
+ throw new InvalidStoreRefException("Store does not exist", storeRef);
+ }
+ // get the root
+ Node node = store.getRootNode();
+ if (node == null)
+ {
+ throw new InvalidStoreRefException("Store does not have a root node", storeRef);
+ }
+ NodeRef nodeRef = node.getNodeRef();
+ // done
+ return nodeRef;
+ }
+
+ /**
+ * @see #createNode(NodeRef, QName, QName, QName, Map)
+ */
+ public ChildAssociationRef createNode(
+ NodeRef parentRef,
+ QName assocTypeQName,
+ QName assocQName,
+ QName nodeTypeQName)
+ {
+ return this.createNode(parentRef, assocTypeQName, assocQName, nodeTypeQName, null);
+ }
+
+ /**
+ * @see org.alfresco.service.cmr.repository.NodeService#createNode(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, java.util.Map)
+ */
+ public ChildAssociationRef createNode(
+ NodeRef parentRef,
+ QName assocTypeQName,
+ QName assocQName,
+ QName nodeTypeQName,
+ Map properties)
+ {
+ Assert.notNull(parentRef);
+ Assert.notNull(assocTypeQName);
+ Assert.notNull(assocQName);
+
+ // null property map is allowed
+ if (properties == null)
+ {
+ properties = new HashMap();
+ }
+ else
+ {
+ // Copy the incoming property map since we may need to modify it later
+ properties = new HashMap(properties);
+ }
+
+ // Invoke policy behaviour
+ invokeBeforeUpdateNode(parentRef);
+ invokeBeforeCreateNode(parentRef, assocTypeQName, assocQName, nodeTypeQName);
+
+ // get the store that the parent belongs to
+ StoreRef storeRef = parentRef.getStoreRef();
+ Store store = nodeDaoService.getStore(storeRef.getProtocol(), storeRef.getIdentifier());
+ if (store == null)
+ {
+ throw new RuntimeException("No store found for parent node: " + parentRef);
+ }
+
+ // check the node type
+ TypeDefinition nodeTypeDef = dictionaryService.getType(nodeTypeQName);
+ if (nodeTypeDef == null)
+ {
+ throw new InvalidTypeException(nodeTypeQName);
+ }
+
+ // get/generate an ID for the node
+ String newId = generateGuid(properties);
+
+ // create the node instance
+ Node childNode = nodeDaoService.newNode(store, newId, nodeTypeQName);
+
+ // get the parent node
+ Node parentNode = getNodeNotNull(parentRef);
+
+ // Set the default property values
+ addDefaultPropertyValues(nodeTypeDef, properties);
+
+ // Add the default aspects to the node
+ addDefaultAspects(nodeTypeDef, childNode, properties);
+
+ // set the properties - it is a new node so only set properties if there are any
+ Map propertiesBefore = getPropertiesImpl(childNode);
+ Map propertiesAfter = null;
+ if (properties.size() > 0)
+ {
+ propertiesAfter = setPropertiesImpl(childNode, properties);
+ }
+
+ // create the association
+ ChildAssoc childAssoc = nodeDaoService.newChildAssoc(
+ parentNode,
+ childNode,
+ true,
+ assocTypeQName,
+ assocQName);
+ setChildUniqueName(childNode); // ensure uniqueness
+ ChildAssociationRef childAssocRef = childAssoc.getChildAssocRef();
+
+ // Invoke policy behaviour
+ invokeOnCreateNode(childAssocRef);
+ invokeOnUpdateNode(parentRef);
+ if (propertiesAfter != null)
+ {
+ invokeOnUpdateProperties(childAssocRef.getChildRef(), propertiesBefore, propertiesAfter);
+ }
+
+ // done
+ return childAssocRef;
+ }
+
+ /**
+ * Add the default aspects to a given node
+ *
+ * @param classDefinition the type or aspect definition supplying the default aspects
+ * @param node the node to add the default aspects to
+ * @param properties the node properties, updated with any default values declared by the aspects
+ */
+ private void addDefaultAspects(ClassDefinition classDefinition, Node node, Map properties)
+ {
+ NodeRef nodeRef = node.getNodeRef();
+
+ // get the mandatory aspects for the node type
+ List defaultAspectDefs = classDefinition.getDefaultAspects();
+
+ // add all the aspects to the node
+ Set nodeAspects = node.getAspects();
+ for (AspectDefinition defaultAspectDef : defaultAspectDefs)
+ {
+ invokeBeforeAddAspect(nodeRef, defaultAspectDef.getName());
+ nodeAspects.add(defaultAspectDef.getName());
+ addDefaultPropertyValues(defaultAspectDef, properties);
+ invokeOnAddAspect(nodeRef, defaultAspectDef.getName());
+
+ // Now add any default aspects for this aspect
+ addDefaultAspects(defaultAspectDef, node, properties);
+ }
+ }
+
+ /**
+ * Drops the old primary association and creates a new one
+ */
+ public ChildAssociationRef moveNode(
+ NodeRef nodeToMoveRef,
+ NodeRef newParentRef,
+ QName assocTypeQName,
+ QName assocQName)
+ throws InvalidNodeRefException
+ {
+ Assert.notNull(nodeToMoveRef);
+ Assert.notNull(newParentRef);
+ Assert.notNull(assocTypeQName);
+ Assert.notNull(assocQName);
+
+ // check the node references
+ Node nodeToMove = getNodeNotNull(nodeToMoveRef);
+ Node newParentNode = getNodeNotNull(newParentRef);
+ // get the primary parent assoc
+ ChildAssoc oldAssoc = nodeDaoService.getPrimaryParentAssoc(nodeToMove);
+ ChildAssociationRef oldAssocRef = oldAssoc.getChildAssocRef();
+ // get the old parent
+ Node oldParentNode = oldAssoc.getParent();
+
+ boolean movingStore = !nodeToMoveRef.getStoreRef().equals(newParentRef.getStoreRef());
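+ // a move within the same store fires the child association policies, whereas a move to a
+ // different store is treated as a delete from the old store and a create in the new one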
+
+ // data needed for policy invocation
+ QName nodeToMoveTypeQName = nodeToMove.getTypeQName();
+ Set nodeToMoveAspects = nodeToMove.getAspects();
+
+ // Invoke policy behaviour
+ if (movingStore)
+ {
+ invokeBeforeDeleteNode(nodeToMoveRef);
+ invokeBeforeCreateNode(newParentRef, assocTypeQName, assocQName, nodeToMoveTypeQName);
+ }
+ else
+ {
+ invokeBeforeDeleteChildAssociation(oldAssocRef);
+ invokeBeforeCreateChildAssociation(newParentRef, nodeToMoveRef, assocTypeQName, assocQName);
+ invokeBeforeUpdateNode(oldParentNode.getNodeRef()); // old parent will be updated
+ invokeBeforeUpdateNode(newParentRef); // new parent ditto
+ }
+
+ // remove the child assoc from the old parent
+ // don't cascade as we will still need the node afterwards
+ nodeDaoService.deleteChildAssoc(oldAssoc, false);
+
+ // create a new assoc
+ ChildAssoc newAssoc = nodeDaoService.newChildAssoc(
+ newParentNode,
+ nodeToMove,
+ true,
+ assocTypeQName,
+ assocQName);
+ setChildUniqueName(nodeToMove); // ensure uniqueness
+ ChildAssociationRef newAssocRef = newAssoc.getChildAssocRef();
+
+ // If the node is moving stores, then drag the node hierarchy with it
+ if (movingStore)
+ {
+ // do the move
+ Store newStore = newParentNode.getStore();
+ moveNodeToStore(nodeToMove, newStore);
+ // the node reference will have changed too
+ nodeToMoveRef = nodeToMove.getNodeRef();
+ }
+
+ // check that no cyclic relationships have been created
+ getPaths(nodeToMoveRef, false);
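+ // (getPaths throws CyclicChildRelationshipException if the move introduced a cycle)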
+
+ // invoke policy behaviour
+ if (movingStore)
+ {
+ // TODO for now indicate that the node has been archived to prevent the version history from being removed
+ // in the future a onMove policy could be added and remove the need for onDelete and onCreate to be fired here
+ invokeOnDeleteNode(oldAssocRef, nodeToMoveTypeQName, nodeToMoveAspects, true);
+ invokeOnCreateNode(newAssoc.getChildAssocRef());
+ }
+ else
+ {
+ invokeOnCreateChildAssociation(newAssoc.getChildAssocRef());
+ invokeOnDeleteChildAssociation(oldAssoc.getChildAssocRef());
+ invokeOnUpdateNode(oldParentNode.getNodeRef());
+ invokeOnUpdateNode(newParentRef);
+ }
+ invokeOnMoveNode(oldAssocRef, newAssocRef);
+
+ // update the node status
+ nodeDaoService.recordChangeId(nodeToMoveRef);
+
+ // done
+ return newAssoc.getChildAssocRef();
+ }
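+
+ /*
+ * Illustrative call (the association type and qname shown are examples only and depend
+ * on the caller's content model):
+ *
+ * ChildAssociationRef moved = nodeService.moveNode(
+ * nodeRef,
+ * newParentRef,
+ * ContentModel.ASSOC_CONTAINS,
+ * QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "movedNode"));
+ */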
+
+ public void setChildAssociationIndex(ChildAssociationRef childAssocRef, int index)
+ {
+ // get nodes
+ Node parentNode = getNodeNotNull(childAssocRef.getParentRef());
+ Node childNode = getNodeNotNull(childAssocRef.getChildRef());
+
+ ChildAssoc assoc = nodeDaoService.getChildAssoc(
+ parentNode,
+ childNode,
+ childAssocRef.getTypeQName(),
+ childAssocRef.getQName());
+ if (assoc == null)
+ {
+ throw new InvalidChildAssociationRefException("Unable to set child association index: \n" +
+ " assoc: " + childAssocRef + "\n" +
+ " index: " + index,
+ childAssocRef);
+ }
+ // set the index
+ assoc.setIndex(index);
+ }
+
+ public QName getType(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ return node.getTypeQName();
+ }
+
+ /**
+ * @see org.alfresco.service.cmr.repository.NodeService#setType(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName)
+ */
+ public void setType(NodeRef nodeRef, QName typeQName) throws InvalidNodeRefException
+ {
+ // check the node type
+ TypeDefinition nodeTypeDef = dictionaryService.getType(typeQName);
+ if (nodeTypeDef == null)
+ {
+ throw new InvalidTypeException(typeQName);
+ }
+
+ // Invoke policies
+ invokeBeforeUpdateNode(nodeRef);
+
+ // Get the node and set the new type
+ Node node = getNodeNotNull(nodeRef);
+ node.setTypeQName(typeQName);
+
+ // Add the default aspects to the node (update the properties with any new default values)
+ Map properties = this.getPropertiesImpl(node);
+ addDefaultAspects(nodeTypeDef, node, properties);
+ this.setProperties(nodeRef, properties);
+
+ // Invoke policies
+ invokeOnUpdateNode(nodeRef);
+ }
+
+ /**
+ * @see Node#getAspects()
+ */
+ public void addAspect(
+ NodeRef nodeRef,
+ QName aspectTypeQName,
+ Map aspectProperties)
+ throws InvalidNodeRefException, InvalidAspectException
+ {
+ // check that the aspect is legal
+ AspectDefinition aspectDef = dictionaryService.getAspect(aspectTypeQName);
+ if (aspectDef == null)
+ {
+ throw new InvalidAspectException("The aspect is invalid: " + aspectTypeQName, aspectTypeQName);
+ }
+
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(nodeRef);
+ invokeBeforeAddAspect(nodeRef, aspectTypeQName);
+
+ Node node = getNodeNotNull(nodeRef);
+
+ // attach the properties to the current node properties
+ Map nodeProperties = getPropertiesImpl(node);
+
+ if (aspectProperties != null)
+ {
+ nodeProperties.putAll(aspectProperties);
+ }
+
+ // Set any default property values that appear on the aspect
+ addDefaultPropertyValues(aspectDef, nodeProperties);
+
+ // Add any dependent aspects
+ addDefaultAspects(aspectDef, node, nodeProperties);
+
+ // Set the property values back on the node
+ setProperties(nodeRef, nodeProperties);
+
+ // physically attach the aspect to the node
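+ // (the policy invocations and change-id update below only run if the aspect was not already present)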
+ if (node.getAspects().add(aspectTypeQName) == true)
+ {
+ // Invoke policy behaviours
+ invokeOnUpdateNode(nodeRef);
+ invokeOnAddAspect(nodeRef, aspectTypeQName);
+
+ // update the node status
+ nodeDaoService.recordChangeId(nodeRef);
+ }
+ }
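+
+ /*
+ * Illustrative call (aspect and property names are examples only):
+ *
+ * Map titledProps = new HashMap();
+ * titledProps.put(ContentModel.PROP_TITLE, "My document");
+ * nodeService.addAspect(nodeRef, ContentModel.ASPECT_TITLED, titledProps);
+ */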
+
+ /**
+ * @see Node#getAspects()
+ */
+ public void removeAspect(NodeRef nodeRef, QName aspectTypeQName)
+ throws InvalidNodeRefException, InvalidAspectException
+ {
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(nodeRef);
+ invokeBeforeRemoveAspect(nodeRef, aspectTypeQName);
+
+ // get the aspect
+ AspectDefinition aspectDef = dictionaryService.getAspect(aspectTypeQName);
+ if (aspectDef == null)
+ {
+ throw new InvalidAspectException(aspectTypeQName);
+ }
+ // get the node
+ Node node = getNodeNotNull(nodeRef);
+
+ // remove the aspect, if present
+ boolean removed = node.getAspects().remove(aspectTypeQName);
+ // if the aspect was present, remove the associated properties
+ if (removed)
+ {
+ Map nodeProperties = node.getProperties();
+ Map propertyDefs = aspectDef.getProperties();
+ for (QName propertyName : propertyDefs.keySet())
+ {
+ nodeProperties.remove(propertyName);
+ }
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(nodeRef);
+ invokeOnRemoveAspect(nodeRef, aspectTypeQName);
+
+ // update the node status
+ nodeDaoService.recordChangeId(nodeRef);
+ }
+ }
+
+ /**
+ * Performs a check on the set of node aspects
+ *
+ * @see Node#getAspects()
+ */
+ public boolean hasAspect(NodeRef nodeRef, QName aspectRef) throws InvalidNodeRefException, InvalidAspectException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ Set aspectQNames = node.getAspects();
+ boolean hasAspect = aspectQNames.contains(aspectRef);
+ // done
+ return hasAspect;
+ }
+
+ public Set getAspects(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ Set aspectQNames = node.getAspects();
+ // copy the set to ensure initialization
+ Set ret = new HashSet(aspectQNames.size());
+ ret.addAll(aspectQNames);
+ // done
+ return ret;
+ }
+
+ public void deleteNode(NodeRef nodeRef)
+ {
+ boolean isArchivedNode = false;
+ boolean requiresDelete = false;
+
+ // Invoke policy behaviours
+ invokeBeforeDeleteNode(nodeRef);
+
+ // get the node
+ Node node = getNodeNotNull(nodeRef);
+ // get the primary parent-child relationship before it is gone
+ ChildAssociationRef childAssocRef = getPrimaryParent(nodeRef);
+ // get type and aspect QNames as they will be unavailable after the delete
+ QName nodeTypeQName = node.getTypeQName();
+ Set nodeAspectQNames = node.getAspects();
+
+ // check if we need to archive the node
+ StoreRef archiveStoreRef = null;
+ if (nodeAspectQNames.contains(ContentModel.ASPECT_TEMPORARY))
+ {
+ // the node has the temporary aspect, meaning
+ // it cannot be archived
+ requiresDelete = true;
+ isArchivedNode = false;
+ }
+ else
+ {
+ StoreRef storeRef = nodeRef.getStoreRef();
+ archiveStoreRef = storeArchiveMap.getArchiveMap().get(storeRef);
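+ // the archive map associates a live store with the store that receives its archived
+ // nodes; if no mapping exists the node will simply be deleted below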
+ // get the type and check if we need archiving
+ TypeDefinition typeDef = dictionaryService.getType(node.getTypeQName());
+ if (typeDef == null || !typeDef.isArchive() || archiveStoreRef == null)
+ {
+ requiresDelete = true;
+ }
+ }
+
+ if (requiresDelete)
+ {
+ // perform a normal deletion
+ nodeDaoService.deleteNode(node, true);
+ isArchivedNode = false;
+ }
+ else
+ {
+ // archive it
+ archiveNode(nodeRef, archiveStoreRef);
+ isArchivedNode = true;
+ }
+
+ // Invoke policy behaviours
+ invokeOnDeleteNode(childAssocRef, nodeTypeQName, nodeAspectQNames, isArchivedNode);
+ }
+
+ public ChildAssociationRef addChild(NodeRef parentRef, NodeRef childRef, QName assocTypeQName, QName assocQName)
+ {
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(parentRef);
+ invokeBeforeCreateChildAssociation(parentRef, childRef, assocTypeQName, assocQName);
+
+ // get the parent node and ensure that it is a container node
+ Node parentNode = getNodeNotNull(parentRef);
+ // get the child node
+ Node childNode = getNodeNotNull(childRef);
+ // make the association
+ ChildAssoc assoc = nodeDaoService.newChildAssoc(
+ parentNode,
+ childNode,
+ false,
+ assocTypeQName,
+ assocQName);
+ // ensure name uniqueness
+ setChildUniqueName(childNode);
+ ChildAssociationRef assocRef = assoc.getChildAssocRef();
+ NodeRef childNodeRef = assocRef.getChildRef();
+
+ // check that the addition of the child has not created a cyclic relationship
+ // this functionality is provided for free in getPaths
+ getPaths(childNodeRef, false);
+
+ // Invoke policy behaviours
+ invokeOnCreateChildAssociation(assocRef);
+ invokeOnUpdateNode(parentRef);
+
+ return assoc.getChildAssocRef();
+ }
+
+ public void removeChild(NodeRef parentRef, NodeRef childRef) throws InvalidNodeRefException
+ {
+ Node parentNode = getNodeNotNull(parentRef);
+ Node childNode = getNodeNotNull(childRef);
+ Long childNodeId = childNode.getId();
+
+ // get all the child assocs
+ ChildAssociationRef primaryAssocRef = null;
+ Collection assocs = nodeDaoService.getChildAssocs(parentNode);
+ assocs = new HashSet(assocs); // copy set as we will be modifying it
+ for (ChildAssoc assoc : assocs)
+ {
+ if (!assoc.getChild().getId().equals(childNodeId))
+ {
+ continue; // not a matching association
+ }
+ ChildAssociationRef assocRef = assoc.getChildAssocRef();
+ // Is this a primary association?
+ if (assoc.getIsPrimary())
+ {
+ // keep the primary association for last
+ primaryAssocRef = assocRef;
+ }
+ else
+ {
+ // delete the association instance - it is not primary
+ invokeBeforeDeleteChildAssociation(assocRef);
+ nodeDaoService.deleteChildAssoc(assoc, true); // cascade
+ invokeOnDeleteChildAssociation(assocRef);
+ }
+ }
+ // remove the child if the primary association was a match
+ if (primaryAssocRef != null)
+ {
+ deleteNode(primaryAssocRef.getChildRef());
+ }
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(parentRef);
+
+ // done
+ }
+
+ public Map getProperties(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ return getPropertiesImpl(node);
+ }
+
+ private Map getPropertiesImpl(Node node) throws InvalidNodeRefException
+ {
+ NodeRef nodeRef = node.getNodeRef();
+
+ Map nodeProperties = node.getProperties();
+ Map ret = new HashMap(nodeProperties.size());
+ // copy values
+ for (Map.Entry entry: nodeProperties.entrySet())
+ {
+ QName propertyQName = entry.getKey();
+ PropertyValue propertyValue = entry.getValue();
+ // get the property definition
+ PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
+ // convert to the correct type
+ Serializable value = makeSerializableValue(propertyDef, propertyValue);
+ // copy across
+ ret.put(propertyQName, value);
+ }
+ // spoof referencable properties
+ addReferencableProperties(nodeRef, node.getId(), ret);
+ // done
+ return ret;
+ }
+
+ public Serializable getProperty(NodeRef nodeRef, QName qname) throws InvalidNodeRefException
+ {
+ // spoof referencable properties
+ if (qname.equals(ContentModel.PROP_STORE_PROTOCOL))
+ {
+ return nodeRef.getStoreRef().getProtocol();
+ }
+ else if (qname.equals(ContentModel.PROP_STORE_IDENTIFIER))
+ {
+ return nodeRef.getStoreRef().getIdentifier();
+ }
+ else if (qname.equals(ContentModel.PROP_NODE_UUID))
+ {
+ return nodeRef.getId();
+ }
+
+ // get the property from the node
+ Node node = getNodeNotNull(nodeRef);
+
+ if (qname.equals(ContentModel.PROP_NODE_DBID))
+ {
+ return node.getId();
+ }
+
+ Map properties = node.getProperties();
+ PropertyValue propertyValue = properties.get(qname);
+
+ // get the property definition
+ PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
+ // convert to the correct type
+ Serializable value = makeSerializableValue(propertyDef, propertyValue);
+ // done
+ return value;
+ }
+
+ /**
+ * Ensures that all required properties are present on the node and copies the
+ * property values to the Node.
+ *
+ * To remove a property, remove it from the map before calling this method.
+ * Null-valued properties are allowed.
+ *
+ * If any of the values are null, a marker object is put in to mimic nulls. They will be turned back into
+ * real nulls when the properties are requested again.
+ *
+ * @see Node#getProperties()
+ */
+ public void setProperties(NodeRef nodeRef, Map properties) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(nodeRef);
+
+ // Do the set properties
+ Map propertiesBefore = getPropertiesImpl(node);
+ Map propertiesAfter = setPropertiesImpl(node, properties);
+
+ setChildUniqueName(node); // ensure uniqueness
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(nodeRef);
+ invokeOnUpdateProperties(nodeRef, propertiesBefore, propertiesAfter);
+ }
+
+ /**
+ * Does the work of setting the property values. Returns a map containing the state of the properties after the set
+ * operation is complete.
+ *
+ * @param node the node
+ * @param properties the map of property values
+ * @return the map of property values after the set operation is complete
+ * @throws InvalidNodeRefException
+ */
+ private Map setPropertiesImpl(Node node, Map properties) throws InvalidNodeRefException
+ {
+ ParameterCheck.mandatory("properties", properties);
+
+ // remove referencable properties
+ removeReferencableProperties(properties);
+
+ // copy properties onto node
+ Map nodeProperties = node.getProperties();
+ nodeProperties.clear();
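+ // note: this is a full replacement - any property not present in the supplied map is removed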
+
+ // check the property type and copy the values across
+ for (QName propertyQName : properties.keySet())
+ {
+ PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
+ Serializable value = properties.get(propertyQName);
+ // get a persistable value
+ PropertyValue propertyValue = makePropertyValue(propertyDef, value);
+ nodeProperties.put(propertyQName, propertyValue);
+ }
+
+ // update the node status
+ NodeRef nodeRef = node.getNodeRef();
+ nodeDaoService.recordChangeId(nodeRef);
+
+ // Return the properties after
+ return Collections.unmodifiableMap(properties);
+ }
+
+ /**
+ * Gets the properties map, sets the value (null is allowed) and checks that the new set
+ * of properties is valid.
+ *
+ * @see DbNodeServiceImpl.NullPropertyValue
+ */
+ public void setProperty(NodeRef nodeRef, QName qname, Serializable value) throws InvalidNodeRefException
+ {
+ Assert.notNull(qname);
+
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(nodeRef);
+
+ // get the node
+ Node node = getNodeNotNull(nodeRef);
+
+ // Do the set operation
+ Map propertiesBefore = getPropertiesImpl(node);
+ Map propertiesAfter = setPropertyImpl(node, qname, value);
+
+ if (qname.equals(ContentModel.PROP_NAME))
+ {
+ setChildUniqueName(node); // ensure uniqueness
+ }
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(nodeRef);
+ invokeOnUpdateProperties(nodeRef, propertiesBefore, propertiesAfter);
+ }
+
+ /**
+ * Does the work of setting a property value. Returns the values of the properties after the set operation is
+ * complete.
+ *
+ * @param node the node
+ * @param qname the qname of the property
+ * @param value the value of the property
+ * @return the values of the properties after the set operation is complete
+ * @throws InvalidNodeRefException
+ */
+ private Map setPropertyImpl(Node node, QName qname, Serializable value) throws InvalidNodeRefException
+ {
+ NodeRef nodeRef = node.getNodeRef();
+
+ Map properties = node.getProperties();
+ PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
+ // get a persistable value
+ PropertyValue propertyValue = makePropertyValue(propertyDef, value);
+ properties.put(qname, propertyValue);
+
+ // update the node status
+ nodeDaoService.recordChangeId(nodeRef);
+
+ return getPropertiesImpl(node);
+ }
+
+ /**
+ * Transforms {@link Node#getParentAssocs()} to a new collection
+ */
+ public Collection getParents(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ // get the assocs pointing to it
+ Collection parentAssocs = node.getParentAssocs();
+ // list of results
+ Collection results = new ArrayList(parentAssocs.size());
+ for (ChildAssoc assoc : parentAssocs)
+ {
+ // get the parent
+ Node parentNode = assoc.getParent();
+ results.add(parentNode.getNodeRef());
+ }
+ // done
+ return results;
+ }
+
+ /**
+ * Filters out any associations if their qname is not a match to the given pattern.
+ */
+ public List getParentAssocs(NodeRef nodeRef, QNamePattern typeQNamePattern, QNamePattern qnamePattern)
+ {
+ Node node = getNodeNotNull(nodeRef);
+ // get the assocs pointing to it
+ Collection parentAssocs = node.getParentAssocs();
+ // shortcut if there are no assocs
+ if (parentAssocs.size() == 0)
+ {
+ return Collections.emptyList();
+ }
+ // list of results
+ List results = new ArrayList(parentAssocs.size());
+ for (ChildAssoc assoc : parentAssocs)
+ {
+ // does the qname match the pattern?
+ if (!qnamePattern.isMatch(assoc.getQname()) || !typeQNamePattern.isMatch(assoc.getTypeQName()))
+ {
+ // no match - ignore
+ continue;
+ }
+ results.add(assoc.getChildAssocRef());
+ }
+ // done
+ return results;
+ }
+
+ /**
+ * Filters out any associations if their qname is not a match to the given pattern.
+ */
+ public List getChildAssocs(NodeRef nodeRef, QNamePattern typeQNamePattern, QNamePattern qnamePattern)
+ {
+ Node node = getNodeNotNull(nodeRef);
+ // get the assocs pointing from it
+ Collection childAssocRefs = nodeDaoService.getChildAssocRefs(node);
+ // shortcut if there are no assocs
+ if (childAssocRefs.size() == 0)
+ {
+ return Collections.emptyList();
+ }
+ // sort results
+ ArrayList orderedList = new ArrayList(childAssocRefs);
+ Collections.sort(orderedList);
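+ // the references are sorted so that the nth-sibling indexes assigned below reflect the sibling ordering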
+
+ // list of results
+ int nthSibling = 0;
+ Iterator iterator = orderedList.iterator();
+ while(iterator.hasNext())
+ {
+ ChildAssociationRef childAssocRef = iterator.next();
+ // does the qname match the pattern?
+ if (!qnamePattern.isMatch(childAssocRef.getQName()) || !typeQNamePattern.isMatch(childAssocRef.getTypeQName()))
+ {
+ // no match - remove
+ iterator.remove();
+ }
+ else
+ {
+ childAssocRef.setNthSibling(nthSibling);
+ nthSibling++;
+ }
+ }
+ // done
+ return orderedList;
+ }
+
+ public NodeRef getChildByName(NodeRef nodeRef, QName assocTypeQName, String childName)
+ {
+ Node node = getNodeNotNull(nodeRef);
+ ChildAssoc childAssoc = nodeDaoService.getChildAssoc(node, assocTypeQName, childName);
+ if (childAssoc != null)
+ {
+ return childAssoc.getChild().getNodeRef();
+ }
+ else
+ {
+ return null;
+ }
+ }
+
+ public ChildAssociationRef getPrimaryParent(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ Node node = getNodeNotNull(nodeRef);
+ // get the primary parent assoc
+ ChildAssoc assoc = nodeDaoService.getPrimaryParentAssoc(node);
+
+ // done - the assoc may be null for a root node
+ ChildAssociationRef assocRef = null;
+ if (assoc == null)
+ {
+ assocRef = new ChildAssociationRef(null, null, null, nodeRef);
+ }
+ else
+ {
+ assocRef = assoc.getChildAssocRef();
+ }
+ return assocRef;
+ }
+
+ public AssociationRef createAssociation(NodeRef sourceRef, NodeRef targetRef, QName assocTypeQName)
+ throws InvalidNodeRefException, AssociationExistsException
+ {
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(sourceRef);
+
+ Node sourceNode = getNodeNotNull(sourceRef);
+ Node targetNode = getNodeNotNull(targetRef);
+ // see if it exists
+ NodeAssoc assoc = nodeDaoService.getNodeAssoc(sourceNode, targetNode, assocTypeQName);
+ if (assoc != null)
+ {
+ throw new AssociationExistsException(sourceRef, targetRef, assocTypeQName);
+ }
+ // we are sure that the association doesn't exist - make it
+ assoc = nodeDaoService.newNodeAssoc(sourceNode, targetNode, assocTypeQName);
+ AssociationRef assocRef = assoc.getNodeAssocRef();
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(sourceRef);
+ invokeOnCreateAssociation(assocRef);
+
+ return assocRef;
+ }
+
+ public void removeAssociation(NodeRef sourceRef, NodeRef targetRef, QName assocTypeQName)
+ throws InvalidNodeRefException
+ {
+ Node sourceNode = getNodeNotNull(sourceRef);
+ Node targetNode = getNodeNotNull(targetRef);
+ // get the association
+ NodeAssoc assoc = nodeDaoService.getNodeAssoc(sourceNode, targetNode, assocTypeQName);
+ if (assoc == null)
+ {
+ // nothing to remove
+ return;
+ }
+ AssociationRef assocRef = assoc.getNodeAssocRef();
+
+ // Invoke policy behaviours
+ invokeBeforeUpdateNode(sourceRef);
+
+ // delete it
+ nodeDaoService.deleteNodeAssoc(assoc);
+
+ // Invoke policy behaviours
+ invokeOnUpdateNode(sourceRef);
+ invokeOnDeleteAssociation(assocRef);
+ }
+
+ public List getTargetAssocs(NodeRef sourceRef, QNamePattern qnamePattern)
+ {
+ Node sourceNode = getNodeNotNull(sourceRef);
+ // get all assocs to target
+ Collection assocs = nodeDaoService.getTargetNodeAssocs(sourceNode);
+ List nodeAssocRefs = new ArrayList(assocs.size());
+ for (NodeAssoc assoc : assocs)
+ {
+ // check qname pattern
+ if (!qnamePattern.isMatch(assoc.getTypeQName()))
+ {
+ continue; // the assoc name doesn't match the pattern given
+ }
+ nodeAssocRefs.add(assoc.getNodeAssocRef());
+ }
+ // done
+ return nodeAssocRefs;
+ }
+
+ public List getSourceAssocs(NodeRef targetRef, QNamePattern qnamePattern)
+ {
+ Node targetNode = getNodeNotNull(targetRef);
+ // get all assocs to source
+ Collection assocs = nodeDaoService.getSourceNodeAssocs(targetNode);
+ List nodeAssocRefs = new ArrayList(assocs.size());
+ for (NodeAssoc assoc : assocs)
+ {
+ // check qname pattern
+ if (!qnamePattern.isMatch(assoc.getTypeQName()))
+ {
+ continue; // the assoc name doesn't match the pattern given
+ }
+ nodeAssocRefs.add(assoc.getNodeAssocRef());
+ }
+ // done
+ return nodeAssocRefs;
+ }
+
+ /**
+ * Recursive method used to build up paths from a given node to the root.
+ *
+ * Whilst walking up the hierarchy to the root, some nodes may have a root aspect.
+ * Every time one of these is encountered, a new path is farmed off, but the method
+ * continues to walk up the hierarchy.
+ *
+ * @param currentNode the node to start from, i.e. the child node to work upwards from
+ * @param currentPath the path from the current node to the descendant that we started from
+ * @param completedPaths paths that have reached the root are added to this collection
+ * @param assocStack the parent-child relationships traversed whilst building the path.
+ * Used to detect cyclic relationships.
+ * @param primaryOnly true if only the primary parent association must be traversed.
+ * If this is true, then the only root is the top level node having no parents.
+ * @throws CyclicChildRelationshipException
+ */
+ private void prependPaths(
+ final Node currentNode,
+ final Path currentPath,
+ Collection completedPaths,
+ Stack assocStack,
+ boolean primaryOnly)
+ throws CyclicChildRelationshipException
+ {
+ NodeRef currentNodeRef = currentNode.getNodeRef();
+ // get the parent associations of the given node
+ Collection parentAssocs = currentNode.getParentAssocs();
+ // does the node have parents
+ boolean hasParents = parentAssocs.size() > 0;
+ // does the current node have a root aspect?
+ boolean isRoot = hasAspect(currentNodeRef, ContentModel.ASPECT_ROOT);
+ boolean isStoreRoot = currentNode.getTypeQName().equals(ContentModel.TYPE_STOREROOT);
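+ // (isStoreRoot decides which association type is used when the root element is spliced into the path below)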
+
+ // look for a root. If we only want the primary root, then ignore all but the top-level root.
+ if (isRoot && !(primaryOnly && hasParents)) // exclude primary search with parents present
+ {
+ // create a one-sided assoc ref for the root node and prepend to the stack
+ // this effectively spoofs the fact that the current node is not below the root
+ // - we put this assoc in as the first assoc in the path must be a one-sided
+ // reference pointing to the root node
+ ChildAssociationRef assocRef = new ChildAssociationRef(
+ null,
+ null,
+ null,
+ getRootNode(currentNode.getNodeRef().getStoreRef()));
+ // create a path to save and add the 'root' assoc
+ Path pathToSave = new Path();
+ Path.ChildAssocElement first = null;
+ for (Path.Element element: currentPath)
+ {
+ if (first == null)
+ {
+ first = (Path.ChildAssocElement) element;
+ }
+ else
+ {
+ pathToSave.append(element);
+ }
+ }
+ if (first != null)
+ {
+ // mimic the association that would appear if the current node was below
+ // the root node; if the node is directly beneath the root node this
+ // recreates the real association
+ ChildAssociationRef updateAssocRef = new ChildAssociationRef(
+ isStoreRoot ? ContentModel.ASSOC_CHILDREN : first.getRef().getTypeQName(),
+ getRootNode(currentNode.getNodeRef().getStoreRef()),
+ first.getRef().getQName(),
+ first.getRef().getChildRef());
+ Path.Element newFirst = new Path.ChildAssocElement(updateAssocRef);
+ pathToSave.prepend(newFirst);
+ }
+
+ Path.Element element = new Path.ChildAssocElement(assocRef);
+ pathToSave.prepend(element);
+
+ // store the path just built
+ completedPaths.add(pathToSave);
+ }
+
+ if (parentAssocs.size() == 0 && !isRoot)
+ {
+ throw new RuntimeException("Node without parents does not have root aspect: " +
+ currentNodeRef);
+ }
+ // walk up each parent association
+ for (ChildAssoc assoc : parentAssocs)
+ {
+ // does the association already exist in the stack
+ if (assocStack.contains(assoc))
+ {
+ // the association was present already
+ throw new CyclicChildRelationshipException(
+ "Cyclic parent-child relationship detected: \n" +
+ " current node: " + currentNode + "\n" +
+ " current path: " + currentPath + "\n" +
+ " next assoc: " + assoc,
+ assoc);
+ }
+ // do we consider only primary assocs?
+ if (primaryOnly && !assoc.getIsPrimary())
+ {
+ continue;
+ }
+ // build a path element
+ NodeRef parentRef = assoc.getParent().getNodeRef();
+ QName qname = assoc.getQname();
+ NodeRef childRef = assoc.getChild().getNodeRef();
+ boolean isPrimary = assoc.getIsPrimary();
+ // build a real association reference
+ ChildAssociationRef assocRef = new ChildAssociationRef(assoc.getTypeQName(), parentRef, qname, childRef, isPrimary, -1);
+ // Ordering is not important here: We are building distinct paths upwards
+ Path.Element element = new Path.ChildAssocElement(assocRef);
+ // create a new path that builds on the current path
+ Path path = new Path();
+ path.append(currentPath);
+ // prepend element
+ path.prepend(element);
+ // get parent node
+ Node parentNode = assoc.getParent();
+
+ // push the assoc stack, recurse and pop
+ assocStack.push(assoc);
+ prependPaths(parentNode, path, completedPaths, assocStack, primaryOnly);
+ assocStack.pop();
+ }
+ // done
+ }
+
+ /**
+ * @see #getPaths(NodeRef, boolean)
+ * @see #prependPaths(Node, Path, Collection, Stack, boolean)
+ */
+ public Path getPath(NodeRef nodeRef) throws InvalidNodeRefException
+ {
+ List paths = getPaths(nodeRef, true); // checks primary path count
+ if (paths.size() == 1)
+ {
+ return paths.get(0); // we know there is only one
+ }
+ throw new RuntimeException("Primary path count not checked"); // checked by getPaths()
+ }
+
+ /**
+ * When searching for primaryOnly == true, checks that there is exactly
+ * one path.
+ * @see #prependPaths(Node, Path, Collection, Stack, boolean)
+ */
+ public List getPaths(NodeRef nodeRef, boolean primaryOnly) throws InvalidNodeRefException
+ {
+ // get the starting node
+ Node node = getNodeNotNull(nodeRef);
+ // create storage for the paths - only need 1 bucket if we are looking for the primary path
+ List paths = new ArrayList(primaryOnly ? 1 : 10);
+ // create an empty current path to start from
+ Path currentPath = new Path();
+ // create storage for touched associations
+ Stack assocStack = new Stack();
+ // call recursive method to sort it out
+ prependPaths(node, currentPath, paths, assocStack, primaryOnly);
+
+ // check that for the primary only case we have exactly one path
+ if (primaryOnly && paths.size() != 1)
+ {
+ throw new RuntimeException("Node has " + paths.size() + " primary paths: " + nodeRef);
+ }
+
+ // done
+ if (loggerPaths.isDebugEnabled())
+ {
+ StringBuilder sb = new StringBuilder(256);
+ if (primaryOnly)
+ {
+ sb.append("Primary paths");
+ }
+ else
+ {
+ sb.append("Paths");
+ }
+ sb.append(" for node ").append(nodeRef);
+ for (Path path : paths)
+ {
+ sb.append("\n").append(" ").append(path);
+ }
+ loggerPaths.debug(sb);
+ }
+ return paths;
+ }
+
+ private void archiveNode(NodeRef nodeRef, StoreRef archiveStoreRef)
+ {
+ Node node = getNodeNotNull(nodeRef);
+ ChildAssoc primaryParentAssoc = nodeDaoService.getPrimaryParentAssoc(node);
+
+ // add the aspect
+ Set aspects = node.getAspects();
+ aspects.add(ContentModel.ASPECT_ARCHIVED);
+ Map properties = node.getProperties();
+ PropertyValue archivedByProperty = makePropertyValue(
+ dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_BY),
+ AuthenticationUtil.getCurrentUserName());
+ properties.put(ContentModel.PROP_ARCHIVED_BY, archivedByProperty);
+ PropertyValue archivedDateProperty = makePropertyValue(
+ dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_DATE),
+ new Date());
+ properties.put(ContentModel.PROP_ARCHIVED_DATE, archivedDateProperty);
+ PropertyValue archivedPrimaryParentNodeRefProperty = makePropertyValue(
+ dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
+ primaryParentAssoc.getChildAssocRef());
+ properties.put(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC, archivedPrimaryParentNodeRefProperty);
+ PropertyValue originalOwnerProperty = properties.get(ContentModel.PROP_OWNER);
+ PropertyValue originalCreatorProperty = properties.get(ContentModel.PROP_CREATOR);
+ if (originalOwnerProperty != null || originalCreatorProperty != null)
+ {
+ properties.put(
+ ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER,
+ originalOwnerProperty != null ? originalOwnerProperty : originalCreatorProperty);
+ }
+
+ // change the node ownership
+ aspects.add(ContentModel.ASPECT_OWNABLE);
+ PropertyValue newOwnerProperty = makePropertyValue(
+ dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER),
+ AuthenticationUtil.getCurrentUserName());
+ properties.put(ContentModel.PROP_OWNER, newOwnerProperty);
+
+ // move the node
+ NodeRef archiveStoreRootNodeRef = getRootNode(archiveStoreRef);
+ moveNode(
+ nodeRef,
+ archiveStoreRootNodeRef,
+ ContentModel.ASSOC_CHILDREN,
+ QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "archivedItem"));
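+ // moveNode detects the store change and drags the node's primary hierarchy into the
+ // archive store (see moveNodeToStore)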
+
+ // get the IDs of all the node's primary children, including its own
+ Map nodesById = getNodeHierarchy(node, null);
+
+ // Archive all the associations between the archived nodes and non-archived nodes
+ for (Node nodeToArchive : nodesById.values())
+ {
+ archiveAssocs(nodeToArchive, nodesById);
+ }
+
+ // the node reference has changed due to the store move
+ nodeRef = node.getNodeRef();
+ }
+
+ /**
+ * Performs all the necessary housekeeping involved in changing a node's store.
+ * This method cascades down through all the primary children of the node as
+ * well.
+ *
+ * @param node the node whose store is changing
+ * @param store the new store for the node
+ */
+ private void moveNodeToStore(Node node, Store store)
+ {
+ // get the IDs of all the node's primary children, including its own
+ Map nodesById = getNodeHierarchy(node, null);
+
+ // move each node into the archive store
+ for (Node nodeToMove : nodesById.values())
+ {
+ NodeRef oldNodeRef = nodeToMove.getNodeRef();
+ nodeToMove.setStore(store);
+ NodeRef newNodeRef = nodeToMove.getNodeRef();
+
+ // update old status
+ NodeStatus oldNodeStatus = nodeDaoService.getNodeStatus(oldNodeRef, true);
+ oldNodeStatus.setNode(null);
+ // create the new status
+ NodeStatus newNodeStatus = nodeDaoService.getNodeStatus(newNodeRef, true);
+ newNodeStatus.setNode(nodeToMove);
+ }
+ }
+
+ /**
+ * Fill the map of all primary children below the given node.
+ * The given node will be added to the map and the method is recursive
+ * to all primary children.
+ *
+ * @param node the start of the hierarchy
+ * @param nodesById a map of nodes that will be reused as the return value
+ * @return Returns a map of nodes in the hierarchy keyed by their IDs
+ */
+ private Map getNodeHierarchy(Node node, Map nodesById)
+ {
+ if (nodesById == null)
+ {
+ nodesById = new HashMap(23);
+ }
+
+ Long id = node.getId();
+ if (nodesById.containsKey(id))
+ {
+ // this ID was already added - circular reference
+ logger.warn("Circular hierarchy found including node " + id);
+ return nodesById;
+ }
+ // add the node to the map
+ nodesById.put(id, node);
+ // recurse into the primary children
+ Collection childAssocs = nodeDaoService.getChildAssocs(node);
+ for (ChildAssoc childAssoc : childAssocs)
+ {
+ // cascade into primary associations
+ if (childAssoc.getIsPrimary())
+ {
+ Node primaryChild = childAssoc.getChild();
+ nodesById = getNodeHierarchy(primaryChild, nodesById);
+ }
+ }
+ return nodesById;
+ }
+
+ /**
+ * Archive all associations to and from the given node, with the
+ * exception of associations to or from nodes in the given map.
+ *
+ * Primary parent associations are also ignored.
+ *
+ * @param node the node whose associations must be archived
+ * @param nodesById a map of nodes partaking in the archival process
+ */
+ private void archiveAssocs(Node node, Map nodesById)
+ {
+ List childAssocsToDelete = new ArrayList(5);
+ // child associations
+ ArrayList archivedChildAssocRefs = new ArrayList(5);
+ Collection childAssocs = nodeDaoService.getChildAssocs(node);
+ for (ChildAssoc assoc : childAssocs)
+ {
+ Long relatedNodeId = assoc.getChild().getId();
+ if (nodesById.containsKey(relatedNodeId))
+ {
+ // a sibling in the archive process
+ continue;
+ }
+ childAssocsToDelete.add(assoc);
+ archivedChildAssocRefs.add(assoc.getChildAssocRef());
+ }
+ // parent associations
+ ArrayList archivedParentAssocRefs = new ArrayList(5);
+ for (ChildAssoc assoc : node.getParentAssocs())
+ {
+ Long relatedNodeId = assoc.getParent().getId();
+ if (nodesById.containsKey(relatedNodeId))
+ {
+ // a sibling in the archive process
+ continue;
+ }
+ else if (assoc.getIsPrimary())
+ {
+ // ignore the primary parent as this is handled more specifically
+ continue;
+ }
+ childAssocsToDelete.add(assoc);
+ archivedParentAssocRefs.add(assoc.getChildAssocRef());
+ }
+
+ List nodeAssocsToDelete = new ArrayList(5);
+ // source associations
+ ArrayList archivedSourceAssocRefs = new ArrayList(5);
+ for (NodeAssoc assoc : nodeDaoService.getSourceNodeAssocs(node))
+ {
+ Long relatedNodeId = assoc.getSource().getId();
+ if (nodesById.containsKey(relatedNodeId))
+ {
+ // a sibling in the archive process
+ continue;
+ }
+ nodeAssocsToDelete.add(assoc);
+ archivedSourceAssocRefs.add(assoc.getNodeAssocRef());
+ }
+ // target associations
+ ArrayList archivedTargetAssocRefs = new ArrayList(5);
+ for (NodeAssoc assoc : nodeDaoService.getTargetNodeAssocs(node))
+ {
+ Long relatedNodeId = assoc.getTarget().getId();
+ if (nodesById.containsKey(relatedNodeId))
+ {
+ // a sibling in the archive process
+ continue;
+ }
+ nodeAssocsToDelete.add(assoc);
+ archivedTargetAssocRefs.add(assoc.getNodeAssocRef());
+ }
+ // delete child assocs
+ for (ChildAssoc assoc : childAssocsToDelete)
+ {
+ nodeDaoService.deleteChildAssoc(assoc, false);
+ }
+ // delete node assocs
+ for (NodeAssoc assoc : nodeAssocsToDelete)
+ {
+ nodeDaoService.deleteNodeAssoc(assoc);
+ }
+
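+ // record the removed associations as properties of the archived-assocs aspect so that
+ // restoreAssocs can recreate them later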
+ // add archived aspect
+ node.getAspects().add(ContentModel.ASPECT_ARCHIVED_ASSOCS);
+ // set properties
+ Map properties = node.getProperties();
+
+ if (archivedParentAssocRefs.size() > 0)
+ {
+ PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_PARENT_ASSOCS);
+ PropertyValue propertyValue = makePropertyValue(propertyDef, archivedParentAssocRefs);
+ properties.put(ContentModel.PROP_ARCHIVED_PARENT_ASSOCS, propertyValue);
+ }
+ if (archivedChildAssocRefs.size() > 0)
+ {
+ PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_CHILD_ASSOCS);
+ PropertyValue propertyValue = makePropertyValue(propertyDef, archivedChildAssocRefs);
+ properties.put(ContentModel.PROP_ARCHIVED_CHILD_ASSOCS, propertyValue);
+ }
+ if (archivedSourceAssocRefs.size() > 0)
+ {
+ PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS);
+ PropertyValue propertyValue = makePropertyValue(propertyDef, archivedSourceAssocRefs);
+ properties.put(ContentModel.PROP_ARCHIVED_SOURCE_ASSOCS, propertyValue);
+ }
+ if (archivedTargetAssocRefs.size() > 0)
+ {
+ PropertyDefinition propertyDef = dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_TARGET_ASSOCS);
+ PropertyValue propertyValue = makePropertyValue(propertyDef, archivedTargetAssocRefs);
+ properties.put(ContentModel.PROP_ARCHIVED_TARGET_ASSOCS, propertyValue);
+ }
+ }
+
+ public NodeRef getStoreArchiveNode(StoreRef storeRef)
+ {
+ StoreRef archiveStoreRef = storeArchiveMap.getArchiveMap().get(storeRef);
+ if (archiveStoreRef == null)
+ {
+ // no mapping for the given store
+ return null;
+ }
+ else
+ {
+ return getRootNode(archiveStoreRef);
+ }
+ }
+
+ public NodeRef restoreNode(NodeRef archivedNodeRef, NodeRef destinationParentNodeRef, QName assocTypeQName, QName assocQName)
+ {
+ Node archivedNode = getNodeNotNull(archivedNodeRef);
+ Set aspects = archivedNode.getAspects();
+ Map properties = archivedNode.getProperties();
+ // the node must be a top-level archive node
+ if (!aspects.contains(ContentModel.ASPECT_ARCHIVED))
+ {
+ throw new AlfrescoRuntimeException("The node to restore is not an archive node");
+ }
+ ChildAssociationRef originalPrimaryParentAssocRef = (ChildAssociationRef) makeSerializableValue(
+ dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
+ properties.get(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC));
+ PropertyValue originalOwnerProperty = properties.get(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
+ // remove the archived aspect
+ aspects.remove(ContentModel.ASPECT_ARCHIVED);
+ properties.remove(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC);
+ properties.remove(ContentModel.PROP_ARCHIVED_BY);
+ properties.remove(ContentModel.PROP_ARCHIVED_DATE);
+ properties.remove(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
+
+ // restore the original ownership
+ if (originalOwnerProperty != null)
+ {
+ aspects.add(ContentModel.ASPECT_OWNABLE);
+ properties.put(ContentModel.PROP_OWNER, originalOwnerProperty);
+ }
+
+ if (destinationParentNodeRef == null)
+ {
+ // we must restore to the original location
+ destinationParentNodeRef = originalPrimaryParentAssocRef.getParentRef();
+ }
+ // check the associations
+ if (assocTypeQName == null)
+ {
+ assocTypeQName = originalPrimaryParentAssocRef.getTypeQName();
+ }
+ if (assocQName == null)
+ {
+ assocQName = originalPrimaryParentAssocRef.getQName();
+ }
+
+ // move the node to the target parent, which may or may not be the original parent
+ moveNode(
+ archivedNodeRef,
+ destinationParentNodeRef,
+ assocTypeQName,
+ assocQName);
+
+ // get the IDs of all the node's primary children, including its own
+ Map restoredNodesById = getNodeHierarchy(archivedNode, null);
+ // Restore the archived associations, if required
+ for (Node restoredNode : restoredNodesById.values())
+ {
+ restoreAssocs(restoredNode);
+ }
+
+ // the node reference has changed due to the store move
+ NodeRef restoredNodeRef = archivedNode.getNodeRef();
+
+ // done
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Restored node: \n" +
+ " original noderef: " + archivedNodeRef + "\n" +
+ " restored noderef: " + restoredNodeRef + "\n" +
+ " new parent: " + destinationParentNodeRef);
+ }
+ return restoredNodeRef;
+ }
+
+ private void restoreAssocs(Node node)
+ {
+ NodeRef nodeRef = node.getNodeRef();
+ // set properties
+ Map