diff --git a/config/alfresco/avm-services-context.xml b/config/alfresco/avm-services-context.xml
index 8977a758df..a002ec43c2 100644
--- a/config/alfresco/avm-services-context.xml
+++ b/config/alfresco/avm-services-context.xml
@@ -73,6 +73,12 @@
+
+ ${orphanReaper.lockRefreshTime}
+
+
+ ${orphanReaper.lockTimeOut}
+
diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml
index 2f1726e72d..9d4e82b90a 100644
--- a/config/alfresco/core-services-context.xml
+++ b/config/alfresco/core-services-context.xml
@@ -662,7 +662,9 @@
cm:likesRatingSchemeTotalcm:likesRatingSchemeCountcm:fiveStarRatingSchemeCount
- cm:fiveStarRatingSchemeTotal
+ cm:fiveStarRatingSchemeTotal
+
+ fm:commentCount
diff --git a/config/alfresco/extension/file-servers-custom.xml.sample b/config/alfresco/extension/file-servers-custom.xml.sample
deleted file mode 100644
index ae418494ee..0000000000
--- a/config/alfresco/extension/file-servers-custom.xml.sample
+++ /dev/null
@@ -1,94 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- workspace://SpacesStore
- /app:company_home
-
-
-
- __Alfresco.url
- http://${localname}:8080/alfresco/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- alfresco/desktop/Alfresco.exe
- http://${localname}:8080/alfresco/
-
-
- org.alfresco.filesys.repo.desk.CheckInOutDesktopAction
- CheckInOut
- __CheckInOut.exe
-
-
- org.alfresco.filesys.repo.desk.JavaScriptDesktopAction
- JavaScriptURL
- __ShowDetails.exe
-
- anyFiles
- copyToTarget
-
-
- org.alfresco.filesys.repo.desk.EchoDesktopAction
- Echo
- __AlfrescoEcho.exe
-
-
- org.alfresco.filesys.repo.desk.URLDesktopAction
- URL
- __AlfrescoURL.exe
-
-
- org.alfresco.filesys.repo.desk.CmdLineDesktopAction
- CmdLine
- __AlfrescoCmd.exe
-
-
- org.alfresco.filesys.repo.desk.JavaScriptDesktopAction
- JavaScript
- __AlfrescoScript.exe
-
- anyFiles, multiplePaths , allowNoParams
- confirm, copyToTarget
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/config/alfresco/extension/file-servers-custom.xml.sample2 b/config/alfresco/extension/file-servers-custom.xml.sample2
deleted file mode 100644
index 0fa89c5895..0000000000
--- a/config/alfresco/extension/file-servers-custom.xml.sample2
+++ /dev/null
@@ -1,61 +0,0 @@
-
-
-
-
-
-
-
-
-
- workspace://SpacesStore
- /app:company_home
-
-
-
- __Alfresco.url
- http://${localname}:8080/alfresco/
-
-
-
-
-
-
-
-
-
- alfresco/desktop/Alfresco.exe
- http://${localname}:8080/alfresco/
-
-
- org.alfresco.filesys.repo.desk.CheckInOutDesktopAction
- CheckInOut
- __CheckInOut.exe
-
-
- org.alfresco.filesys.repo.desk.JavaScriptDesktopAction
- JavaScriptURL
- __ShowDetails.exe
-
- anyFiles
- copyToTarget
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/config/alfresco/extension/network-protocol-context.xml.sample b/config/alfresco/extension/network-protocol-context.xml.sample
deleted file mode 100644
index d2b30f9cb4..0000000000
--- a/config/alfresco/extension/network-protocol-context.xml.sample
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- classpath:alfresco/file-servers.xml
-
- classpath:alfresco/extension/file-servers-custom.xml
-
-
-
-
-
\ No newline at end of file
diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
index 9289457fe4..be9979d71c 100644
--- a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
+++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
@@ -1069,6 +1069,12 @@
and prop4.string_value like #{pattern}
+
+ and assoc.type_qname_id in
+
+ #{item}
+
+
@@ -1097,6 +1103,12 @@
#{item}
+
+ and assoc.type_qname_id in
+
+ #{item}
+
+
and prop4.string_value like #{pattern}
diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/query-test-common-SqlMap.xml b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/query-test-common-SqlMap.xml
index 6c4569501f..07d81ab68b 100644
--- a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/query-test-common-SqlMap.xml
+++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/query-test-common-SqlMap.xml
@@ -55,5 +55,32 @@
JUNKED
+
+
+
+
\ No newline at end of file
diff --git a/config/alfresco/messages/content-model.properties b/config/alfresco/messages/content-model.properties
index 916ec5fa8d..454cfb9a77 100644
--- a/config/alfresco/messages/content-model.properties
+++ b/config/alfresco/messages/content-model.properties
@@ -358,3 +358,6 @@ cm_contentmodel.property.cm_isIndexed.title=Is Indexed
cm_contentmodel.property.cm_isIndexed.description=Is the node indexed and can be found via search.
cm_contentmodel.property.cm_isContentIndexed.title=Is Content Indexed
cm_contentmodel.property.cm_isContentIndexed.description=Are the node's d:content properties indexed?
+
+cm_contentmodel.property.cm_tagScopeSummary.title=Tag Summary
+cm_contentmodel.property.cm_tagScopeSummary.description=Tag Summary
\ No newline at end of file
diff --git a/config/alfresco/messages/site-model.properties b/config/alfresco/messages/site-model.properties
new file mode 100644
index 0000000000..dd39280d29
--- /dev/null
+++ b/config/alfresco/messages/site-model.properties
@@ -0,0 +1,5 @@
+# Display labels for Site Model
+st_siteModel.property.st_sitePreset.title=Site Preset
+st_siteModel.property.st_sitePreset.description=Site Preset
+st_siteModel.property.st_siteVisibility.title=Site Visibility
+st_siteModel.property.st_siteVisibility.description=Site Visibility
\ No newline at end of file
diff --git a/config/alfresco/model/systemModel.xml b/config/alfresco/model/systemModel.xml
index d67a5b859c..844f9388a3 100644
--- a/config/alfresco/model/systemModel.xml
+++ b/config/alfresco/model/systemModel.xml
@@ -96,11 +96,24 @@
Store Rootsys:container
+
+
+
+ false
+ false
+
+
+ sys:lost_found
+ false
+ false
+
+
+ sys:aspect_root
-
+
Referencesys:base
@@ -112,6 +125,11 @@
+
+ Lost+Found
+ sys:container
+
+
diff --git a/config/alfresco/mt/mt-base-context.xml b/config/alfresco/mt/mt-base-context.xml
index 44f6268918..ef86911e60 100644
--- a/config/alfresco/mt/mt-base-context.xml
+++ b/config/alfresco/mt/mt-base-context.xml
@@ -33,6 +33,7 @@
${alfresco_user_store.adminusername}
+
diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties
index d2d4379566..9e4b297610 100644
--- a/config/alfresco/repository.properties
+++ b/config/alfresco/repository.properties
@@ -789,6 +789,10 @@ deployment.filesystem.default.rootdir=./www
deployment.filesystem.default.name=filesystem
deployment.filesystem.default.metadatadir=${deployment.filesystem.metadatadir}/default
+# OrphanReaper
+orphanReaper.lockRefreshTime=60000
+orphanReaper.lockTimeOut=3600000
+
#
# Encryption properties
#
diff --git a/config/alfresco/site-services-context.xml b/config/alfresco/site-services-context.xml
index c62b35306f..023c023a1b 100644
--- a/config/alfresco/site-services-context.xml
+++ b/config/alfresco/site-services-context.xml
@@ -9,6 +9,12 @@
alfresco/model/siteModel.xml
+
+
+
+ alfresco/messages/site-model
+
+
diff --git a/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication-context.xml b/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication-context.xml
index 2ba2255825..a8aac32ac8 100644
--- a/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication-context.xml
+++ b/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication-context.xml
@@ -76,6 +76,9 @@
true
+
+ ${kerberos.authentication.cifs.enableTicketCracking}
+
\ No newline at end of file
diff --git a/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication.properties b/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication.properties
index 00198ffeb0..423defd9a3 100644
--- a/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication.properties
+++ b/config/alfresco/subsystems/Authentication/kerberos/kerberos-authentication.properties
@@ -3,4 +3,5 @@ kerberos.authentication.user.configEntryName=Alfresco
kerberos.authentication.defaultAdministratorUserNames=
kerberos.authentication.cifs.configEntryName=AlfrescoCIFS
kerberos.authentication.cifs.password=secret
+kerberos.authentication.cifs.enableTicketCracking=false
kerberos.authentication.authenticateCIFS=true
\ No newline at end of file
diff --git a/config/alfresco/subsystems/fileServers/default/network-protocol-context.xml b/config/alfresco/subsystems/fileServers/default/network-protocol-context.xml
index 6f54d94a55..36577b79a3 100644
--- a/config/alfresco/subsystems/fileServers/default/network-protocol-context.xml
+++ b/config/alfresco/subsystems/fileServers/default/network-protocol-context.xml
@@ -144,6 +144,12 @@
+
+
+ ^\._.*
+ 30000
+ HIGH
+ [0-9A-F]{8}+$
diff --git a/config/alfresco/thumbnail-service-context.xml b/config/alfresco/thumbnail-service-context.xml
index 6531b08cab..a6dc5246d6 100644
--- a/config/alfresco/thumbnail-service-context.xml
+++ b/config/alfresco/thumbnail-service-context.xml
@@ -58,6 +58,7 @@
+
diff --git a/source/java/org/alfresco/cmis/renditions/CMISRenditionServiceTest.java b/source/java/org/alfresco/cmis/renditions/CMISRenditionServiceTest.java
index 973514066d..a609b553c1 100644
--- a/source/java/org/alfresco/cmis/renditions/CMISRenditionServiceTest.java
+++ b/source/java/org/alfresco/cmis/renditions/CMISRenditionServiceTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -199,6 +199,7 @@ public class CMISRenditionServiceTest extends BaseCMISTest
contentWriter.setLocale(Locale.ENGLISH);
contentWriter.putContent(documentContent);
ContentReader contentReader = fileFolderService.getReader(textDocument);
+ // contentReader will not be null as an exception will have been thrown if there was a problem
NodeRef document = fileFolderService.create(rootNodeRef, documentName, ContentModel.TYPE_CONTENT).getNodeRef();
contentWriter = fileFolderService.getWriter(document);
diff --git a/source/java/org/alfresco/filesys/auth/cifs/EnterpriseCifsAuthenticator.java b/source/java/org/alfresco/filesys/auth/cifs/EnterpriseCifsAuthenticator.java
index 362246174f..0c2c9f4196 100644
--- a/source/java/org/alfresco/filesys/auth/cifs/EnterpriseCifsAuthenticator.java
+++ b/source/java/org/alfresco/filesys/auth/cifs/EnterpriseCifsAuthenticator.java
@@ -137,9 +137,17 @@ public class EnterpriseCifsAuthenticator extends CifsAuthenticatorBase implement
private byte[] m_negTokenInit;
private String m_mecListMIC;
+ // Enable Kerberos debug output
+
private boolean kerberosDebug;
+ // Disable NTLM logons, only Kerberos logons allowed
+
private boolean disableNTLM;
+
+ // Enable ticket cracking code, required for Java5 JVMs
+
+ private boolean m_enableTicketCracking;
/**
* Class constructor
@@ -202,6 +210,20 @@ public class EnterpriseCifsAuthenticator extends CifsAuthenticatorBase implement
this.m_acceptNTLMv1 = !disallowNTLMv1;
}
+ /**
+ * Enable Kerberos ticket cracking code that is required for Java5
+ *
+ * @param enaTktCracking boolean
+ */
+ public void setEnableTicketCracking( boolean enaTktCracking) {
+ m_enableTicketCracking = enaTktCracking;
+
+ // Debug
+
+ if ( logger.isInfoEnabled() && enaTktCracking)
+ logger.info("CIFS Kerberos authentication, ticket cracking enabled (for mutual authentication)");
+ }
+
/**
* Initialize the authenticator (via the config service)
*
@@ -1372,7 +1394,7 @@ public class EnterpriseCifsAuthenticator extends CifsAuthenticatorBase implement
KrbAuthContext krbAuthCtx = null;
- if ( krbApReq.hasMutualAuthentication())
+ if ( krbApReq.hasMutualAuthentication() && m_enableTicketCracking == true)
{
// Allocate the Kerberos authentication and parse the AP-REQ
diff --git a/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java b/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
index 241bf6e91c..55207718de 100644
--- a/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
+++ b/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
@@ -15,6 +15,7 @@ import org.alfresco.filesys.repo.rules.commands.CopyContentCommand;
import org.alfresco.filesys.repo.rules.commands.CreateFileCommand;
import org.alfresco.filesys.repo.rules.commands.DeleteFileCommand;
import org.alfresco.filesys.repo.rules.commands.DoNothingCommand;
+import org.alfresco.filesys.repo.rules.commands.MoveFileCommand;
import org.alfresco.filesys.repo.rules.commands.OpenFileCommand;
import org.alfresco.filesys.repo.rules.commands.ReduceQuotaCommand;
import org.alfresco.filesys.repo.rules.commands.RemoveNoContentFileOnError;
@@ -248,6 +249,12 @@ public class CommandExecutorImpl implements CommandExecutor
RenameFileCommand rename = (RenameFileCommand)command;
diskInterface.renameFile(sess, tree, rename.getFromPath(), rename.getToPath());
}
+ else if(command instanceof MoveFileCommand)
+ {
+ logger.debug("move command");
+ MoveFileCommand rename = (MoveFileCommand)command;
+ diskInterface.renameFile(sess, tree, rename.getFromPath(), rename.getToPath());
+ }
else if(command instanceof CopyContentCommand)
{
if(logger.isDebugEnabled())
diff --git a/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java b/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
index c5f3501629..41b86929ec 100644
--- a/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
+++ b/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
@@ -1489,7 +1489,7 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
// Check for delete permission
if ( permissionService.hasPermission(nodeRef, PermissionService.DELETE) == AccessStatus.DENIED)
{
- throw new AccessDeniedException("No delete access to :" + name);
+ throw new PermissionDeniedException("No delete access to :" + name);
}
// Check if the node is locked
diff --git a/source/java/org/alfresco/filesys/repo/ContentDiskDriverTest.java b/source/java/org/alfresco/filesys/repo/ContentDiskDriverTest.java
index 92fd2039c7..e820644f99 100644
--- a/source/java/org/alfresco/filesys/repo/ContentDiskDriverTest.java
+++ b/source/java/org/alfresco/filesys/repo/ContentDiskDriverTest.java
@@ -4341,6 +4341,196 @@ public class ContentDiskDriverTest extends TestCase
} // test set modified scenario
+ /**
+ * This test tries to simulate the cifs shuffling that is done
+ * from Save from Mac Lion by TextEdit
+ *
+ * a) Lock file created. (._test.txt)
+ * b) Temp file created in temporary folder (test.txt)
+ * c) Target file deleted
+ * d) Temp file renamed to target file.
+ * e) Lock file deleted
+ *
+ */
+ public void testScenarioMacLionTextEdit() throws Exception
+ {
+ logger.debug("testScenarioLionTextEdit");
+ final String FILE_NAME = "test.txt";
+ final String LOCK_FILE_NAME = "._test.txt";
+ final String TEMP_FILE_NAME = "test.txt";
+
+ final String UPDATED_TEXT = "Mac Lion Text Updated Content";
+
+ class TestContext
+ {
+ NetworkFile lockFileHandle;
+ NetworkFile firstFileHandle;
+ NetworkFile tempFileHandle;
+ NodeRef testNodeRef; // node ref of test.doc
+ };
+
+ final TestContext testContext = new TestContext();
+
+ final String TEST_ROOT_DIR = "\\ContentDiskDriverTest";
+ final String TEST_DIR = "\\ContentDiskDriverTest\\testScenarioLionTextEdit";
+ final String TEST_TEMP_DIR = "\\ContentDiskDriverTest\\testScenarioLionTextEdit\\.Temporary Items";
+
+ ServerConfiguration scfg = new ServerConfiguration("testServer");
+ TestServer testServer = new TestServer("testServer", scfg);
+ final SrvSession testSession = new TestSrvSession(666, testServer, "test", "remoteName");
+ DiskSharedDevice share = getDiskSharedDevice();
+ final TreeConnection testConnection = testServer.getTreeConnection(share);
+ final RetryingTransactionHelper tran = transactionService.getRetryingTransactionHelper();
+
+ /**
+ * Create a file in the test directory
+ */
+ RetryingTransactionCallback createFileCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+ /**
+ * Create the test directory we are going to use
+ */
+ FileOpenParams createRootDirParams = new FileOpenParams(TEST_ROOT_DIR, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ FileOpenParams createDirParams = new FileOpenParams(TEST_DIR, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ FileOpenParams createTempDirParams = new FileOpenParams(TEST_TEMP_DIR, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ driver.createDirectory(testSession, testConnection, createRootDirParams);
+ driver.createDirectory(testSession, testConnection, createDirParams);
+ driver.createDirectory(testSession, testConnection, createTempDirParams);
+
+ /**
+ * Create the file we are going to use
+ */
+ FileOpenParams createFileParams = new FileOpenParams(TEST_DIR + "\\" + FILE_NAME, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ testContext.firstFileHandle = driver.createFile(testSession, testConnection, createFileParams);
+ assertNotNull(testContext.firstFileHandle);
+
+ String testContent = "Mac Lion Text";
+ byte[] testContentBytes = testContent.getBytes();
+
+ driver.writeFile(testSession, testConnection, testContext.firstFileHandle, testContentBytes, 0, testContentBytes.length, 0);
+ driver.closeFile(testSession, testConnection, testContext.firstFileHandle);
+
+ /**
+ * Create the temp file we are going to use
+ */
+ FileOpenParams createTempFileParams = new FileOpenParams(TEST_TEMP_DIR + "\\" + FILE_NAME, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ testContext.tempFileHandle = driver.createFile(testSession, testConnection, createTempFileParams);
+ assertNotNull(testContext.tempFileHandle);
+
+ testContent = UPDATED_TEXT;
+ testContentBytes = testContent.getBytes();
+ driver.writeFile(testSession, testConnection, testContext.tempFileHandle, testContentBytes, 0, testContentBytes.length, 0);
+ driver.closeFile(testSession, testConnection, testContext.tempFileHandle);
+
+ return null;
+ }
+ };
+ tran.doInTransaction(createFileCB, false, true);
+
+ /**
+ * a) create the lock file
+ */
+ RetryingTransactionCallback createLockFileCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+ /**
+ * Create the lock file we are going to use
+ */
+ FileOpenParams createFileParams = new FileOpenParams(TEST_DIR + "\\" + LOCK_FILE_NAME, 0, AccessMode.ReadWrite, FileAttribute.NTNormal, 0);
+ testContext.lockFileHandle = driver.createFile(testSession, testConnection, createFileParams);
+ assertNotNull(testContext.lockFileHandle);
+ testContext.lockFileHandle.closeFile();
+
+ /**
+ * Also add versionable to target file
+ */
+ testContext.testNodeRef = getNodeForPath(testConnection, TEST_DIR + "\\" + FILE_NAME);
+ nodeService.addAspect(testContext.testNodeRef, ContentModel.ASPECT_VERSIONABLE, null);
+
+
+ return null;
+ }
+ };
+ tran.doInTransaction(createLockFileCB, false, true);
+
+ /**
+ * b) Delete the target file
+ */
+ RetryingTransactionCallback deleteTargetFileCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+ driver.deleteFile(testSession, testConnection, TEST_DIR + "\\" + FILE_NAME);
+ return null;
+ }
+ };
+ tran.doInTransaction(deleteTargetFileCB, false, true);
+
+ /**
+ * c) Move the temp file into place
+ */
+ RetryingTransactionCallback moveTempFileCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+ driver.renameFile(testSession, testConnection, TEST_TEMP_DIR + "\\" + TEMP_FILE_NAME, TEST_DIR + "\\" + FILE_NAME);
+ return null;
+ }
+ };
+ tran.doInTransaction(moveTempFileCB, false, true);
+
+
+ /**
+ * d) Delete Lock File
+ */
+ RetryingTransactionCallback deleteLockFileCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+ driver.deleteFile(testSession, testConnection, TEST_DIR + "\\" + LOCK_FILE_NAME);
+
+ return null;
+ }
+ };
+
+ tran.doInTransaction(deleteLockFileCB, false, true);
+
+ RetryingTransactionCallback validateCB = new RetryingTransactionCallback() {
+
+ @Override
+ public Void execute() throws Throwable
+ {
+
+ NodeRef shuffledNodeRef = getNodeForPath(testConnection, TEST_DIR + "\\" + FILE_NAME);
+
+ assertEquals("shuffledNode ref is different", shuffledNodeRef, testContext.testNodeRef);
+ assertTrue("", nodeService.hasAspect(shuffledNodeRef, ContentModel.ASPECT_VERSIONABLE));
+
+ ContentReader reader = contentService.getReader(shuffledNodeRef, ContentModel.PROP_CONTENT);
+ assertNotNull("Reader is null", reader);
+ String s = reader.getContentString();
+ assertEquals("content not written", UPDATED_TEXT, s);
+
+
+ return null;
+ }
+ };
+
+ tran.doInTransaction(validateCB, false, true);
+
+ } // testScenarioLionTextEdit
+
+
+
+
/**
* Test server
*/
diff --git a/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java b/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
index ae4cb1a3c4..3a200a445f 100644
--- a/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
+++ b/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
@@ -34,6 +34,7 @@ import org.alfresco.filesys.repo.rules.RuleEvaluator;
import org.alfresco.filesys.repo.rules.operations.CloseFileOperation;
import org.alfresco.filesys.repo.rules.operations.CreateFileOperation;
import org.alfresco.filesys.repo.rules.operations.DeleteFileOperation;
+import org.alfresco.filesys.repo.rules.operations.MoveFileOperation;
import org.alfresco.filesys.repo.rules.operations.OpenFileOperation;
import org.alfresco.filesys.repo.rules.operations.RenameFileOperation;
import org.alfresco.jlan.server.SrvSession;
@@ -430,17 +431,24 @@ public class NonTransactionalRuleContentDiskDriver implements ExtendedDiskInterf
}
else
{
- logger.debug("move - call renameFile directly");
-// // TODO Use old interface for rename/move until think
-// // through move operation and how it applies to the evaluator contexts
-// // plural since there will be two contexts.
-// logger.debug("move");
-// Operation o = new MoveFileOperation(oldFile, newFile);
-// Command c = ruleEvaluator.evaluate(ctx, o);
-//
-// commandExecutor.execute(sess, tree, c);
+ logger.debug("moveFileCommand - move between folders");
+
+ Operation o = new MoveFileOperation(oldFile, newFile, oldPath, newPath, rootNode);
- diskInterface.renameFile(sess, tree, oldPath, newPath);
+ /*
+ * Note: At the moment we only have move scenarios for the destination folder - so
+ * we only need to evaluate against a single (destination) context/folder.
+ * This will require re-design as and when we need to have scenarios for the source/folder
+ */
+
+ //EvaluatorContext ctx1 = getEvaluatorContext(driverState, oldFolder);
+ EvaluatorContext ctx2 = getEvaluatorContext(driverState, newFolder);
+
+ Command c = ruleEvaluator.evaluate(ctx2, o);
+
+ commandExecutor.execute(sess, tree, c);
+
+ // diskInterface.renameFile(sess, tree, oldPath, newPath);
}
diff --git a/source/java/org/alfresco/filesys/repo/TempNetworkFile.java b/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
index 135a9a4e71..62d4192f21 100644
--- a/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
+++ b/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
@@ -4,6 +4,7 @@ import java.io.File;
import java.io.IOException;
import java.io.Reader;
+import org.alfresco.jlan.server.filesys.FileAttribute;
import org.alfresco.jlan.server.filesys.cache.FileState;
import org.alfresco.jlan.server.filesys.cache.NetworkFileStateInterface;
import org.alfresco.jlan.smb.server.disk.JavaNetworkFile;
@@ -27,6 +28,7 @@ public class TempNetworkFile extends JavaNetworkFile implements NetworkFileState
{
super(file, netPath);
setFullName(netPath);
+ setAttributes(FileAttribute.NTNormal);
}
/**
@@ -39,6 +41,7 @@ public class TempNetworkFile extends JavaNetworkFile implements NetworkFileState
{
super(file, netPath);
setFullName(netPath);
+ setAttributes(FileAttribute.NTNormal);
}
/**
diff --git a/source/java/org/alfresco/filesys/repo/rules/RuleEvaluator.java b/source/java/org/alfresco/filesys/repo/rules/RuleEvaluator.java
index d420ee37a5..0520179c23 100644
--- a/source/java/org/alfresco/filesys/repo/rules/RuleEvaluator.java
+++ b/source/java/org/alfresco/filesys/repo/rules/RuleEvaluator.java
@@ -34,8 +34,12 @@ public interface RuleEvaluator
public EvaluatorContext createContext();
/**
- * Evaluate the scenarios against the current operation
- * @param Command the command to fulfill the operation
+ * Evaluate the scenarios contained within the context against the current operation
+ * @param context - the context to evaluate the operation
+ * @param operation - the operation to be evaluated.
+ * @return Command the command to fulfil the operation
*/
- public Command evaluate(EvaluatorContext context, Operation operation);
+ public Command evaluate(EvaluatorContext context, Operation operation);
+
+
}
diff --git a/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffle.java b/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffle.java
new file mode 100644
index 0000000000..d9568aa8e7
--- /dev/null
+++ b/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffle.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2005-2010 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see .
+ */
+package org.alfresco.filesys.repo.rules;
+
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.alfresco.filesys.repo.rules.ScenarioInstance.Ranking;
+import org.alfresco.filesys.repo.rules.operations.CreateFileOperation;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * A locked delete shuffle
+ */
+public class ScenarioLockedDeleteShuffle implements Scenario
+{
+ private static Log logger = LogFactory.getLog(ScenarioLockedDeleteShuffle.class);
+
+ /**
+ * The regex pattern of a create that will trigger a new instance of
+ * the scenario.
+ */
+ private Pattern pattern;
+ private String strPattern;
+
+
+ private long timeout = 30000;
+
+ private Ranking ranking = Ranking.HIGH;
+
+ @Override
+ public ScenarioInstance createInstance(final List currentInstances, Operation operation)
+ {
+ /**
+ * This scenario is triggered by a create of a file matching
+ * the pattern
+ */
+ if(operation instanceof CreateFileOperation)
+ {
+ CreateFileOperation c = (CreateFileOperation)operation;
+
+ Matcher m = pattern.matcher(c.getName());
+ if(m.matches())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("New Scenario Locked Delete Shuffle Instance pattern:" + strPattern);
+ }
+
+ ScenarioLockedDeleteShuffleInstance instance = new ScenarioLockedDeleteShuffleInstance() ;
+ instance.setTimeout(timeout);
+ instance.setRanking(ranking);
+ return instance;
+ }
+ }
+
+ // No, not interested.
+ return null;
+
+ }
+
+ public void setPattern(String pattern)
+ {
+ this.pattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE);
+ this.strPattern = pattern;
+ }
+
+ public String getPattern()
+ {
+ return this.strPattern;
+ }
+
+ public void setTimeout(long timeout)
+ {
+ this.timeout = timeout;
+ }
+
+ public long getTimeout()
+ {
+ return timeout;
+ }
+
+ public void setRanking(Ranking ranking)
+ {
+ this.ranking = ranking;
+ }
+
+ public Ranking getRanking()
+ {
+ return ranking;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffleInstance.java b/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffleInstance.java
new file mode 100644
index 0000000000..d83ad8d0c3
--- /dev/null
+++ b/source/java/org/alfresco/filesys/repo/rules/ScenarioLockedDeleteShuffleInstance.java
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2005-2010 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see .
+ */
+package org.alfresco.filesys.repo.rules;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.alfresco.filesys.repo.rules.commands.CompoundCommand;
+import org.alfresco.filesys.repo.rules.commands.CopyContentCommand;
+import org.alfresco.filesys.repo.rules.commands.DeleteFileCommand;
+import org.alfresco.filesys.repo.rules.commands.RenameFileCommand;
+import org.alfresco.filesys.repo.rules.operations.CreateFileOperation;
+import org.alfresco.filesys.repo.rules.operations.DeleteFileOperation;
+import org.alfresco.filesys.repo.rules.operations.MoveFileOperation;
+import org.alfresco.filesys.repo.rules.operations.RenameFileOperation;
+import org.alfresco.jlan.server.filesys.FileName;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * This is an instance of a "locked delete shuffle" triggered by a create of a
+ * file matching a specified pattern.
+ *
+ *
+ * First implemented for TextEdit from MacOS Lion
+ *
+ *
+ * Sequence of operations.
+ * a) Lock file created. Typically with an obscure name.
+ * b) Temp file created in temporary folder
+ * c) Target file deleted
+ * d) Temp file renamed to target file.
+ * e) Lock file deleted
+ *
+ * If this filter is active then this is what happens.
+ * a) Lock file created. Lock file created (X).
+ * b) Temp file created - in another folder.
+ * c) Existing file deleted. Scenario kicks in to rename rather than delete.
+ * d) New file moved into place (X to Y). Scenario kicks in
+ * 1) renames file from step c
+ * 2) copies content from temp file to target file
+ * 3) deletes temp file.
+ * e) Lock file deleted.
+ */
+public class ScenarioLockedDeleteShuffleInstance implements ScenarioInstance
+{
+ private static Log logger = LogFactory.getLog(ScenarioLockedDeleteShuffleInstance.class);
+
+ enum InternalState
+ {
+ NONE,
+ LOCKED, // Lock file has been created and not deleted
+ DELETE_SUBSTITUTED, // Scenario has intervened and renamed rather than deleted
+ MOVED
+ }
+
+ InternalState internalState = InternalState.NONE;
+
+ private Date startTime = new Date();
+
+ private String lockName;
+
+ private Ranking ranking;
+
+
+ /**
+ * Timeout in ms. Default 30 seconds.
+ */
+ private long timeout = 30000;
+
+ private boolean isComplete;
+
+ /**
+ * Keep track of deletes that we substitute with a rename
+ * could be more than one if scenarios overlap
+ *
+ * From, TempFileName
+ */
+ private Map deletes = new HashMap();
+
+ /**
+ * Evaluate the next operation
+ * @param operation
+ */
+ public Command evaluate(Operation operation)
+ {
+
+ /**
+ * Anti-pattern for all states - delete the lock file
+ */
+ if(lockName != null)
+ {
+ if(operation instanceof DeleteFileOperation)
+ {
+ DeleteFileOperation d = (DeleteFileOperation)operation;
+ if(d.getName().equals(lockName))
+ {
+ logger.debug("Anti-pattern : Lock file deleted");
+ isComplete = true;
+ return null;
+ }
+ }
+ }
+
+ /**
+ * Anti-pattern : timeout
+ */
+ Date now = new Date();
+ if(now.getTime() > startTime.getTime() + getTimeout())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("Instance timed out");
+ }
+ }
+
+ switch (internalState)
+ {
+ case NONE:
+ // Looking for a create transition
+ if(operation instanceof CreateFileOperation)
+ {
+ CreateFileOperation c = (CreateFileOperation)operation;
+ this.lockName = c.getName();
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("entering LOCKED state: " + lockName);
+ }
+ internalState = InternalState.LOCKED;
+ return null;
+ }
+ else
+ {
+ // anything else bomb out
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("State error, expected a CREATE");
+ }
+ isComplete = true;
+ }
+ break;
+
+ case LOCKED:
+
+ /**
+ * Looking for target file being deleted
+ *
+ * Need to intervene and replace delete with a rename to temp file.
+ */
+ if(operation instanceof DeleteFileOperation)
+ {
+ DeleteFileOperation d = (DeleteFileOperation)operation;
+
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("entering DELETE_SUBSTITUTED state: " + lockName);
+ }
+
+ String tempName = ".shuffle" + d.getName();
+
+ deletes.put(d.getName(), tempName);
+
+ String[] paths = FileName.splitPath(d.getPath());
+ String currentFolder = paths[0];
+
+ RenameFileCommand r1 = new RenameFileCommand(d.getName(), tempName, d.getRootNodeRef(), d.getPath(), currentFolder + "\\" + tempName);
+
+ internalState = InternalState.DELETE_SUBSTITUTED;
+
+ return r1;
+
+ }
+
+ case DELETE_SUBSTITUTED:
+
+ /**
+ * Looking for a move operation of the deleted file
+ */
+ if(operation instanceof MoveFileOperation)
+ {
+ MoveFileOperation m = (MoveFileOperation)operation;
+
+ String targetFile = m.getTo();
+
+ if(deletes.containsKey(targetFile))
+ {
+ String tempName = deletes.get(targetFile);
+
+ String[] paths = FileName.splitPath(m.getToPath());
+ String currentFolder = paths[0];
+
+ /**
+ * This is where the scenario fires.
+ * a) Rename the temp file back to the targetFile
+ * b) Copy content from moved file
+ * c) Delete rather than move file
+ */
+ ArrayList commands = new ArrayList();
+
+ RenameFileCommand r1 = new RenameFileCommand(tempName, targetFile, m.getRootNodeRef(), currentFolder + "\\" + tempName, m.getToPath());
+
+ CopyContentCommand copyContent = new CopyContentCommand(m.getFrom(), targetFile, m.getRootNodeRef(), m.getFromPath(), m.getToPath());
+
+ DeleteFileCommand d1 = new DeleteFileCommand(m.getFrom(), m.getRootNodeRef(), m.getFromPath());
+
+ commands.add(r1);
+ commands.add(copyContent);
+ commands.add(d1);
+
+ logger.debug("Scenario complete");
+ isComplete = true;
+
+ return new CompoundCommand(commands);
+
+ }
+
+ //TODO - Need to consider error cases and "overlap"
+
+// if(logger.isDebugEnabled())
+// {
+// logger.debug("entering MOVED state: " + lockName);
+// }
+// internalState = InternalState.MOVED;
+ }
+
+
+ case MOVED:
+
+ }
+
+ return null;
+ }
+
+ @Override
+ public boolean isComplete()
+ {
+ return isComplete;
+ }
+
+ @Override
+ public Ranking getRanking()
+ {
+ return ranking;
+ }
+
+ public void setRanking(Ranking ranking)
+ {
+ this.ranking = ranking;
+ }
+
+ public String toString()
+ {
+ return "ScenarioLockedDeleteShuffleInstance:" + lockName;
+ }
+
+ public void setTimeout(long timeout)
+ {
+ this.timeout = timeout;
+ }
+
+ public long getTimeout()
+ {
+ return timeout;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/repo/rules/ScenarioSimpleNonBufferedInstance.java b/source/java/org/alfresco/filesys/repo/rules/ScenarioSimpleNonBufferedInstance.java
index e5b43068af..7fcbc6cfe8 100644
--- a/source/java/org/alfresco/filesys/repo/rules/ScenarioSimpleNonBufferedInstance.java
+++ b/source/java/org/alfresco/filesys/repo/rules/ScenarioSimpleNonBufferedInstance.java
@@ -71,7 +71,7 @@ public class ScenarioSimpleNonBufferedInstance implements ScenarioInstance
else if(operation instanceof MoveFileOperation)
{
MoveFileOperation m = (MoveFileOperation)operation;
- return new MoveFileCommand(m.getFrom(), m.getTo());
+ return new MoveFileCommand(m.getFrom(), m.getTo(), m.getRootNodeRef(), m.getFromPath(), m.getToPath());
}
else if(operation instanceof OpenFileOperation)
{
diff --git a/source/java/org/alfresco/filesys/repo/rules/commands/MoveFileCommand.java b/source/java/org/alfresco/filesys/repo/rules/commands/MoveFileCommand.java
index 64ab21ff53..6d2d69dd99 100644
--- a/source/java/org/alfresco/filesys/repo/rules/commands/MoveFileCommand.java
+++ b/source/java/org/alfresco/filesys/repo/rules/commands/MoveFileCommand.java
@@ -22,20 +22,24 @@ import java.util.List;
import org.alfresco.filesys.repo.rules.Command;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
+import org.alfresco.service.cmr.repository.NodeRef;
-/**
- * Rename command
- */
public class MoveFileCommand implements Command
{
private String from;
private String to;
+ private NodeRef rootNode;
+ private String fromPath;
+ private String toPath;
- public MoveFileCommand(String from, String to)
+ public MoveFileCommand(String from, String to, NodeRef rootNode, String fromPath, String toPath)
{
this.from = from;
this.to = to;
+ this.rootNode = rootNode;
+ this.fromPath = fromPath;
+ this.toPath = toPath;
}
@@ -55,4 +59,40 @@ public class MoveFileCommand implements Command
{
return TxnReadState.TXN_READ_WRITE;
}
+
+
+ public void setRootNode(NodeRef rootNode)
+ {
+ this.rootNode = rootNode;
+ }
+
+
+ public NodeRef getRootNode()
+ {
+ return rootNode;
+ }
+
+
+ public void setFromPath(String fromPath)
+ {
+ this.fromPath = fromPath;
+ }
+
+
+ public String getFromPath()
+ {
+ return fromPath;
+ }
+
+
+ public void setToPath(String toPath)
+ {
+ this.toPath = toPath;
+ }
+
+
+ public String getToPath()
+ {
+ return toPath;
+ }
}
diff --git a/source/java/org/alfresco/filesys/repo/rules/operations/DeleteFileOperation.java b/source/java/org/alfresco/filesys/repo/rules/operations/DeleteFileOperation.java
index 5aa187fa98..65eadf26ae 100644
--- a/source/java/org/alfresco/filesys/repo/rules/operations/DeleteFileOperation.java
+++ b/source/java/org/alfresco/filesys/repo/rules/operations/DeleteFileOperation.java
@@ -28,6 +28,12 @@ public class DeleteFileOperation implements Operation
private NodeRef rootNodeRef;
private String path;
+ /**
+ * Delete File Operation
+ * @param name of file
+ * @param rootNodeRef root node ref
+ * @param path path + name of file to delete
+ */
public DeleteFileOperation(String name, NodeRef rootNodeRef, String path)
{
this.name = name;
diff --git a/source/java/org/alfresco/filesys/repo/rules/operations/MoveFileOperation.java b/source/java/org/alfresco/filesys/repo/rules/operations/MoveFileOperation.java
index 55da175e80..637daea827 100644
--- a/source/java/org/alfresco/filesys/repo/rules/operations/MoveFileOperation.java
+++ b/source/java/org/alfresco/filesys/repo/rules/operations/MoveFileOperation.java
@@ -19,16 +19,34 @@
package org.alfresco.filesys.repo.rules.operations;
import org.alfresco.filesys.repo.rules.Operation;
+import org.alfresco.service.cmr.repository.NodeRef;
+/**
+ * Rename a file within the same directory
+ */
public class MoveFileOperation implements Operation
{
private String from;
private String to;
+ private String fromPath;
+ private String toPath;
+ NodeRef rootNodeRef;
- public MoveFileOperation(String from, String to)
+ /**
+ *
+ * @param from name of file from
+ * @param to name of file to
+ * @param fromPath full path of from
+ * @param toPath full path of to
+ * @param rootNodeRef the root node reference for the file's store
+ */
+ public MoveFileOperation(String from, String to, String fromPath, String toPath, NodeRef rootNodeRef)
{
this.from = from;
this.to = to;
+ this.fromPath = fromPath;
+ this.toPath = toPath;
+ this.rootNodeRef = rootNodeRef;
}
@@ -42,29 +60,44 @@ public class MoveFileOperation implements Operation
return to;
}
+ public String getToPath()
+ {
+ return toPath;
+ }
+
+ public String getFromPath()
+ {
+ return fromPath;
+ }
+
+ public NodeRef getRootNodeRef()
+ {
+ return rootNodeRef;
+ }
+
public String toString()
{
- return "MoveFileOperation: from " + from + " to "+ to;
+ return "MoveFileOperation: from " + fromPath + " to "+ toPath;
}
public int hashCode()
{
- return from.hashCode();
+ return fromPath.hashCode();
}
public boolean equals(Object o)
{
if(o instanceof MoveFileOperation)
{
- MoveFileOperation r = (MoveFileOperation)o;
- if(from.equals(r.getFrom()) && to.equals(r.getTo()))
+ MoveFileOperation r = (MoveFileOperation)o;
+ if(fromPath.equals(r.getFromPath()) && toPath.equals(r.getToPath()))
{
return true;
}
}
return false;
}
-
-
-
}
+
+
+
diff --git a/source/java/org/alfresco/repo/avm/OrphanReaper.java b/source/java/org/alfresco/repo/avm/OrphanReaper.java
index 7f6f45c2dc..725754c46a 100644
--- a/source/java/org/alfresco/repo/avm/OrphanReaper.java
+++ b/source/java/org/alfresco/repo/avm/OrphanReaper.java
@@ -20,11 +20,13 @@ package org.alfresco.repo.avm;
import java.util.LinkedList;
import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.alfresco.repo.domain.avm.AVMHistoryLinkEntity;
import org.alfresco.repo.domain.avm.AVMMergeLinkEntity;
import org.alfresco.repo.domain.permissions.Acl;
import org.alfresco.repo.lock.JobLockService;
+import org.alfresco.repo.lock.JobLockService.JobLockRefreshCallback;
import org.alfresco.repo.lock.LockAcquisitionException;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.ContentData;
@@ -46,7 +48,7 @@ public class OrphanReaper
{
synchronized (this)
{
- if (fRunning)
+ if (fRunning.get())
{
if (fgLogger.isDebugEnabled())
{
@@ -56,7 +58,7 @@ public class OrphanReaper
return;
}
- fRunning = true;
+ fRunning.set(true);
if (fgLogger.isTraceEnabled())
{
@@ -68,7 +70,7 @@ public class OrphanReaper
do
{
doBatch();
- if (fDone)
+ if (fDone.get())
{
if (fgLogger.isTraceEnabled())
{
@@ -90,13 +92,13 @@ public class OrphanReaper
// Do nothing.
}
}
- while (fActive);
+ while (fActive.get());
}
finally
{
synchronized (this)
{
- fRunning = false;
+ fRunning.set(false);
if (fgLogger.isTraceEnabled())
{
@@ -116,33 +118,39 @@ public class OrphanReaper
*/
private TransactionService fTransactionService;
+ /**
+ * How many ms before refreshing the lock?
+ */
+ private long lockRefreshTime = 60000;
+
+ /**
+ * How long in ms to keep the lock in total before giving up, just in case there is a dead lock.
+ */
+ private long lockTimeOut = 3600000;
+
/**
* Active base sleep interval.
*/
- private long fActiveBaseSleep;
+ private long fActiveBaseSleep = 1000;
/**
* Batch size.
*/
- private int fBatchSize;
+ private int fBatchSize = 50;
/**
* Whether we are currently active, ie have work queued up.
+ * Using Atomics so that the memory model is synchronized between threads.
*/
- private boolean fActive;
-
- private boolean fDone = false;
-
- private boolean fRunning = false;
-
+ private AtomicBoolean fActive = new AtomicBoolean(false);
+ private AtomicBoolean fDone = new AtomicBoolean(false);
+ private AtomicBoolean fRunning = new AtomicBoolean(false);
+
/**
* Create one with default parameters.
*/
public OrphanReaper()
{
- fActiveBaseSleep = 1000;
- fBatchSize = 50;
- fActive = false;
}
// Setters for configuration.
@@ -188,20 +196,37 @@ public class OrphanReaper
this.jobLockService = jobLockService;
}
- /**
- * Start things up after configuration is complete.
- */
- // public void init()
- // {
- // fThread = new Thread(this);
- // fThread.start();
- // }
+ public void setLockRefreshTime(long lockRefreshTime)
+ {
+ this.lockRefreshTime = lockRefreshTime;
+ }
+
+ public long getLockRefreshTime()
+ {
+ return lockRefreshTime;
+ }
+
+ public long getTimeToLive()
+ {
+ return getLockRefreshTime() * 2;
+ }
+
+ public void setLockTimeOut(long lockTimeOut)
+ {
+ this.lockTimeOut = lockTimeOut;
+ }
+
+ public long getLockTimeOut()
+ {
+ return lockTimeOut;
+ }
+
/**
* Shutdown the reaper. This needs to be called when the application shuts down.
*/
public void shutDown()
{
- fDone = true;
+ fDone.set(true);
}
/**
@@ -209,11 +234,11 @@ public class OrphanReaper
*
* @return Returns the lock token or null
*/
- private String getLock(long time)
+ private String getLock()
{
try
{
- return jobLockService.getLock(LOCK, time);
+ return jobLockService.getLock(LOCK, getTimeToLive());
}
catch (LockAcquisitionException e)
{
@@ -222,48 +247,54 @@ public class OrphanReaper
}
/**
- * Attempts to get the lock. If it fails, the current transaction is marked for rollback.
- *
- * @return Returns the lock token
+ * Creates a callback to refresh the lock if we are still doing work.
+ * @param lockToken to refresh
+ * @param lockHeld flag to indicate if the lock is needed/held.
+ * @param start when processing started.
*/
- private void refreshLock(String lockToken, long time)
+ private void createLockRefreshCallback(final String lockToken, final AtomicBoolean lockHeld, final long start)
{
if (lockToken == null)
{
throw new IllegalArgumentException("Must provide existing lockToken");
}
- jobLockService.refreshLock(lockToken, LOCK, time);
+
+ JobLockRefreshCallback callback = new JobLockRefreshCallback()
+ {
+ @Override
+ public boolean isActive()
+ {
+ boolean active = lockHeld.get();
+ if (active)
+ {
+ // Check for deadlock
+ if (System.currentTimeMillis() >= start + getLockTimeOut())
+ {
+ active = false;
+ lockHeld.set(false); // if we are not deadlocked, this stops processing in the main thread
+ fgLogger.error("Lock held too long. Do we have a deadlock? Restart process.");
+ }
+ }
+ return active;
+ }
+
+ @Override
+ public void lockReleased()
+ {
+ lockHeld.set(false);
+ }
+ };
+
+ jobLockService.refreshLock(lockToken, LOCK, getTimeToLive(), callback);
}
-
- /**
- * Sit in a loop, periodically querying for orphans. When orphans are found, unhook them in bite sized batches.
- */
- // public void run()
- // {
- // while (!fDone)
- // {
- // synchronized (this)
- // {
- // try
- // {
- // wait(fActive? fActiveBaseSleep : fInactiveBaseSleep);
- // }
- // catch (InterruptedException ie)
- // {
- // // Do nothing.
- // }
- // doBatch();
- // }
- // }
- // }
/**
* This is really for debugging and testing. Allows another thread to mark the orphan reaper busy so that it can
* monitor for it's being done.
*/
public void activate()
{
- fActive = true;
+ fActive.set(true);
}
/**
@@ -273,7 +304,7 @@ public class OrphanReaper
*/
public boolean isActive()
{
- return fActive;
+ return fActive.get();
}
/**
@@ -285,20 +316,30 @@ public class OrphanReaper
{
public Object execute() throws Exception
{
- String lockToken = getLock(20000L);
+ final long start = System.currentTimeMillis();
+ int reapCnt = 0;
+
+ String lockToken = getLock();
if (lockToken == null)
{
fgLogger.info("Can't get lock. Assume multiple reapers ...");
- fActive = false;
+ fActive.set(false);
return null;
}
- if (fgLogger.isTraceEnabled())
+ AtomicBoolean lockHeld = new AtomicBoolean(true);
+ try
{
- fgLogger.trace("Orphan reaper doBatch: batchSize="+fBatchSize+", fActiveBaseSleep="+fActiveBaseSleep);
- }
+ // Creates a callback that refreshes the lock as long the code in this try block is
+ // still running. If the JVM crashes, the lock will time out. Just in case the lock
+ // still times out, we check at several points in processing and have an overall
+ // timeout in case of deadlock.
+ createLockRefreshCallback(lockToken, lockHeld, start);
+ if (fgLogger.isTraceEnabled())
+ {
+ fgLogger.trace("Orphan reaper doBatch: batchSize="+fBatchSize+", fActiveBaseSleep="+fActiveBaseSleep);
+ }
- refreshLock(lockToken, fBatchSize * 100L);
List nodes = AVMDAOs.Instance().fAVMNodeDAO.getOrphans(fBatchSize);
if (nodes.size() == 0)
{
@@ -307,11 +348,14 @@ public class OrphanReaper
fgLogger.trace("Nothing to purge (set fActive = false)");
}
- fActive = false;
+ fActive.set(false);
return null;
}
- refreshLock(lockToken, nodes.size() * 100L);
+ if (!lockHeld.get())
+ {
+ throw new LockAcquisitionException("Lock lost. Finding orphans to reap.");
+ }
LinkedList fPurgeQueue = new LinkedList();
for (AVMNode node : nodes)
{
@@ -323,125 +367,150 @@ public class OrphanReaper
fgLogger.debug("Queue was empty so got more orphans from DB. Orphan queue size = "+fPurgeQueue.size());
}
- fActive = true;
+ fActive.set(true);
- int reapCnt = 0;
-
- long start = System.currentTimeMillis();
-
- for (int i = 0; i < fBatchSize; i++)
- {
- if (fPurgeQueue.size() == 0)
+ for (int i = 0; i < fBatchSize; i++)
{
- if (fgLogger.isTraceEnabled())
+ if (fPurgeQueue.size() == 0)
{
- fgLogger.trace("Purge queue is empty (fpurgeQueue size = "+fPurgeQueue.size()+")");
- }
-
- fPurgeQueue = null;
- break;
- }
-
- refreshLock(lockToken, 10000L);
- Long nodeId = fPurgeQueue.removeFirst();
- AVMNode node = AVMDAOs.Instance().fAVMNodeDAO.getByID(nodeId);
-
- // Save away the ancestor and merged from fields from this node.
-
- AVMNode ancestor = null;
- AVMHistoryLinkEntity hlEntity = AVMDAOs.Instance().newAVMNodeLinksDAO.getHistoryLinkByDescendent(node.getId());
- if (hlEntity != null)
- {
- ancestor = AVMDAOs.Instance().fAVMNodeDAO.getByID(hlEntity.getAncestorNodeId());
- AVMDAOs.Instance().newAVMNodeLinksDAO.deleteHistoryLink(hlEntity.getAncestorNodeId(), hlEntity.getDescendentNodeId());
- }
-
- AVMNode mergedFrom = null;
- AVMMergeLinkEntity mlEntity = AVMDAOs.Instance().newAVMNodeLinksDAO.getMergeLinkByTo(node.getId());
- if (mlEntity != null)
- {
- mergedFrom = AVMDAOs.Instance().fAVMNodeDAO.getByID(mlEntity.getMergeFromNodeId());
- AVMDAOs.Instance().newAVMNodeLinksDAO.deleteMergeLink(mlEntity.getMergeFromNodeId(), mlEntity.getMergeToNodeId());
- }
-
- // Get all the nodes that have this node as ancestor.
- List hlEntities = AVMDAOs.Instance().newAVMNodeLinksDAO.getHistoryLinksByAncestor(node.getId());
- for (AVMHistoryLinkEntity link : hlEntities)
- {
- AVMNode desc = AVMDAOs.Instance().fAVMNodeDAO.getByID(link.getDescendentNodeId());
- if (desc != null)
- {
- desc.setAncestor(ancestor);
- if (desc.getMergedFrom() == null)
+ if (fgLogger.isTraceEnabled())
{
- desc.setMergedFrom(mergedFrom);
+ fgLogger.trace("Purge queue is empty (fpurgeQueue size = "
+ + fPurgeQueue.size() + ")");
+ }
+
+ fPurgeQueue = null;
+ break;
+ }
+
+ if (!lockHeld.get())
+ {
+ throw new LockAcquisitionException("Lock lost. Orphan reap loop: "+i);
+ }
+ Long nodeId = fPurgeQueue.removeFirst();
+ AVMNode node = AVMDAOs.Instance().fAVMNodeDAO.getByID(nodeId);
+
+ // Save away the ancestor and merged from fields from
+ // this node.
+
+ AVMNode ancestor = null;
+ AVMHistoryLinkEntity hlEntity = AVMDAOs.Instance().newAVMNodeLinksDAO
+ .getHistoryLinkByDescendent(node.getId());
+ if (hlEntity != null)
+ {
+ ancestor = AVMDAOs.Instance().fAVMNodeDAO.getByID(hlEntity
+ .getAncestorNodeId());
+ AVMDAOs.Instance().newAVMNodeLinksDAO.deleteHistoryLink(
+ hlEntity.getAncestorNodeId(), hlEntity.getDescendentNodeId());
+ }
+
+ AVMNode mergedFrom = null;
+ AVMMergeLinkEntity mlEntity = AVMDAOs.Instance().newAVMNodeLinksDAO
+ .getMergeLinkByTo(node.getId());
+ if (mlEntity != null)
+ {
+ mergedFrom = AVMDAOs.Instance().fAVMNodeDAO.getByID(mlEntity
+ .getMergeFromNodeId());
+ AVMDAOs.Instance().newAVMNodeLinksDAO.deleteMergeLink(
+ mlEntity.getMergeFromNodeId(), mlEntity.getMergeToNodeId());
+ }
+
+ // Get all the nodes that have this node as ancestor.
+ List hlEntities = AVMDAOs.Instance().newAVMNodeLinksDAO
+ .getHistoryLinksByAncestor(node.getId());
+ for (AVMHistoryLinkEntity link : hlEntities)
+ {
+ AVMNode desc = AVMDAOs.Instance().fAVMNodeDAO.getByID(link
+ .getDescendentNodeId());
+ if (desc != null)
+ {
+ desc.setAncestor(ancestor);
+ if (desc.getMergedFrom() == null)
+ {
+ desc.setMergedFrom(mergedFrom);
+ }
+ }
+ AVMDAOs.Instance().newAVMNodeLinksDAO.deleteHistoryLink(
+ link.getAncestorNodeId(), link.getDescendentNodeId());
+ }
+ // Get all the nodes that have this node as mergedFrom
+ List mlEntities = AVMDAOs.Instance().newAVMNodeLinksDAO
+ .getMergeLinksByFrom(node.getId());
+ for (AVMMergeLinkEntity link : mlEntities)
+ {
+ AVMNode mto = AVMDAOs.Instance().fAVMNodeDAO.getByID(link
+ .getMergeToNodeId());
+ if (mto != null)
+ {
+ mto.setMergedFrom(ancestor);
+ }
+ AVMDAOs.Instance().newAVMNodeLinksDAO.deleteMergeLink(
+ link.getMergeFromNodeId(), link.getMergeToNodeId());
+ }
+
+ // Get rid of all properties belonging to this node.
+ AVMDAOs.Instance().fAVMNodeDAO.deleteProperties(node.getId());
+
+ // Get rid of all aspects belonging to this node.
+ AVMDAOs.Instance().fAVMNodeDAO.deleteAspects(node.getId());
+
+ // Get rid of ACL.
+ @SuppressWarnings("unused")
+ Acl acl = node.getAcl();
+ node.setAcl(null);
+ // Unused acls will be garbage collected
+ // Many acls will be shared
+ // Extra work for directories.
+ if (node.getType() == AVMNodeType.PLAIN_DIRECTORY
+ || node.getType() == AVMNodeType.LAYERED_DIRECTORY)
+ {
+ // First get rid of all child entries for the node.
+ AVMDAOs.Instance().fChildEntryDAO.deleteByParent(node);
+ }
+ else if (node.getType() == AVMNodeType.PLAIN_FILE)
+ {
+ PlainFileNode file = (PlainFileNode) node;
+ if (file.isLegacyContentData())
+ {
+ // We quickly convert the old ContentData to the
+ // new storage
+ ContentData contentData = file.getContentData();
+ file.setContentData(contentData);
+ }
+ Long contentDataId = file.getContentDataId();
+ if (contentDataId != null)
+ {
+ // The ContentDataDAO will take care of
+ // dereferencing and cleanup
+ AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId);
}
}
- AVMDAOs.Instance().newAVMNodeLinksDAO.deleteHistoryLink(link.getAncestorNodeId(), link.getDescendentNodeId());
- }
- // Get all the nodes that have this node as mergedFrom
- List mlEntities = AVMDAOs.Instance().newAVMNodeLinksDAO.getMergeLinksByFrom(node.getId());
- for (AVMMergeLinkEntity link : mlEntities)
- {
- AVMNode mto = AVMDAOs.Instance().fAVMNodeDAO.getByID(link.getMergeToNodeId());
- if (mto != null)
- {
- mto.setMergedFrom(ancestor);
- }
- AVMDAOs.Instance().newAVMNodeLinksDAO.deleteMergeLink(link.getMergeFromNodeId(), link.getMergeToNodeId());
- }
-
- // Get rid of all properties belonging to this node.
- AVMDAOs.Instance().fAVMNodeDAO.deleteProperties(node.getId());
-
- // Get rid of all aspects belonging to this node.
- AVMDAOs.Instance().fAVMNodeDAO.deleteAspects(node.getId());
-
- // Get rid of ACL.
- @SuppressWarnings("unused")
- Acl acl = node.getAcl();
- node.setAcl(null);
- // Unused acls will be garbage collected
- // Many acls will be shared
- // Extra work for directories.
- if (node.getType() == AVMNodeType.PLAIN_DIRECTORY || node.getType() == AVMNodeType.LAYERED_DIRECTORY)
- {
- // First get rid of all child entries for the node.
- AVMDAOs.Instance().fChildEntryDAO.deleteByParent(node);
- }
- else if (node.getType() == AVMNodeType.PLAIN_FILE)
- {
- PlainFileNode file = (PlainFileNode)node;
- if (file.isLegacyContentData())
- {
- // We quickly convert the old ContentData to the new storage
- ContentData contentData = file.getContentData();
- file.setContentData(contentData);
- }
- Long contentDataId = file.getContentDataId();
- if (contentDataId != null)
- {
- // The ContentDataDAO will take care of dereferencing and cleanup
- AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId);
- }
- }
-
+
// Finally, delete it
AVMDAOs.Instance().fAVMNodeDAO.delete(node);
-
+
if (fgLogger.isTraceEnabled())
{
- fgLogger.trace("Deleted Node ["+node.getId()+"]");
+ fgLogger.trace("Deleted Node [" + node.getId() + "]");
}
-
- reapCnt++;
+
+ reapCnt++;
+ }
+ // Check we still have the lock at the end
+ if (!lockHeld.get())
+ {
+ throw new LockAcquisitionException("Lock lost at the end of processing");
+ }
}
-
- jobLockService.releaseLock(lockToken, LOCK);
-
- if (fgLogger.isDebugEnabled())
+ finally
{
- fgLogger.debug("Reaped "+reapCnt+" nodes in "+(System.currentTimeMillis()-start)+" msecs");
+ lockHeld.set(false);
+ jobLockService.releaseLock(lockToken, LOCK);
+
+ if (fgLogger.isDebugEnabled())
+ {
+ fgLogger.debug("Reaped "+reapCnt+" nodes in "+(System.currentTimeMillis()-start)+" ms");
+ }
}
return null;
@@ -456,7 +525,7 @@ public class OrphanReaper
}
catch (Exception e)
{
- fgLogger.error("Garbage collector error", e);
+ fgLogger.warn("Garbage collector error. Restarting process", e);
}
}
}
diff --git a/source/java/org/alfresco/repo/calendar/CalendarServiceImpl.java b/source/java/org/alfresco/repo/calendar/CalendarServiceImpl.java
index e030b5059c..6295c87edc 100644
--- a/source/java/org/alfresco/repo/calendar/CalendarServiceImpl.java
+++ b/source/java/org/alfresco/repo/calendar/CalendarServiceImpl.java
@@ -294,7 +294,7 @@ public class CalendarServiceImpl implements CalendarService
// Run the canned query
GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject(CANNED_QUERY_GET_CHILDREN);
GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(
- container, null, types, null, sort, paging);
+ container, null, null, types, null, sort, paging);
// Execute the canned query
CannedQueryResults results = cq.execute();
diff --git a/source/java/org/alfresco/repo/content/ContentServiceImpl.java b/source/java/org/alfresco/repo/content/ContentServiceImpl.java
index 29a1f1e5b4..34e7de94d3 100644
--- a/source/java/org/alfresco/repo/content/ContentServiceImpl.java
+++ b/source/java/org/alfresco/repo/content/ContentServiceImpl.java
@@ -562,6 +562,10 @@ public class ContentServiceImpl implements ContentService, ApplicationContextAwa
throws NoTransformerException, ContentIOException
{
// check that source and target mimetypes are available
+ if (reader == null)
+ {
+ throw new AlfrescoRuntimeException("The content reader must be set");
+ }
String sourceMimetype = reader.getMimetype();
if (sourceMimetype == null)
{
diff --git a/source/java/org/alfresco/repo/dictionary/TestModel.java b/source/java/org/alfresco/repo/dictionary/TestModel.java
index 4f0c05a7cc..5eeab22062 100644
--- a/source/java/org/alfresco/repo/dictionary/TestModel.java
+++ b/source/java/org/alfresco/repo/dictionary/TestModel.java
@@ -36,13 +36,23 @@ import org.alfresco.repo.tenant.TenantService;
*/
public class TestModel
{
-
+ /**
+ * Test model
+ *
+ * Java command line client
+ *
+ * Syntax:
+ *
+ * TestModel [-h] [model filename]*
+ *
+ * Returns 0 for success.
+ */
public static void main(String[] args)
{
if (args != null && args.length > 0 && args[0].equals("-h"))
{
System.out.println("TestModel [model filename]*");
- System.exit(0);
+ System.exit(1);
}
System.out.println("Testing dictionary model definitions...");
@@ -91,6 +101,9 @@ public class TestModel
bootstrap.setDictionaryDAO(dictionaryDAO);
bootstrap.bootstrap();
System.out.println("Models are valid.");
+
+ System.exit(0); // Success
+
}
catch(Exception e)
{
@@ -101,6 +114,7 @@ public class TestModel
System.out.println(t.getMessage());
t = t.getCause();
}
+ System.exit(2); // Not Success
}
}
diff --git a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
index b93e4405bf..7ac064e6e4 100644
--- a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
+++ b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -60,6 +60,7 @@ import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionAwareSingleton;
import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
@@ -85,7 +86,7 @@ import org.alfresco.util.GUID;
import org.alfresco.util.Pair;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.ReadWriteLockExecuter;
-import org.alfresco.util.SerializationUtils;
+import org.alfresco.util.ValueProtectingMap;
import org.alfresco.util.EqualsHelper.MapValueComparison;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -992,9 +993,15 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
* @throws ConcurrencyFailureException if the ID doesn't reference a live node
*/
private Node getNodeNotNull(Long nodeId)
+ {
+ return getNodeNotNullImpl(nodeId, false);
+ }
+
+ private Node getNodeNotNullImpl(Long nodeId, boolean deleted)
{
Pair pair = nodesCache.getByKey(nodeId);
- if (pair == null || pair.getSecond().getDeleted())
+
+ if (pair == null || (pair.getSecond().getDeleted() && (!deleted)))
{
// Force a removal from the cache
nodesCache.removeByKey(nodeId);
@@ -1009,11 +1016,11 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
else
{
- throw new ConcurrencyFailureException(
- "No live node exists: \n" +
+ logger.warn("No live node exists: \n" +
" ID: " + nodeId + "\n" +
" Cache row: " + pair.getSecond() + "\n" +
" DB row: " + dbNode);
+ throw new NotLiveNodeException(pair);
}
}
else
@@ -1110,7 +1117,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
childNodeName = node.getUuid();
}
ChildAssocEntity assoc = newChildAssocImpl(
- parentNodeId, nodeId, true, assocTypeQName, assocQName, childNodeName);
+ parentNodeId, nodeId, true, assocTypeQName, assocQName, childNodeName, false);
// There will be no other parent assocs
boolean isRoot = false;
@@ -1821,6 +1828,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
public Map getNodeProperties(Long nodeId)
{
Map props = getNodePropertiesCached(nodeId);
+ // Create a shallow copy to allow additions
+ props = new HashMap(props);
Node node = getNodeNotNull(nodeId);
// Handle sys:referenceable
@@ -1838,6 +1847,10 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
props.putAll(auditableProperties.getAuditableProperties());
}
+ // Wrap to ensure that we only clone values if the client attempts to modify
+ // the map or retrieve values that might, themselves, be mutable
+ props = new ValueProtectingMap(props, NodePropertyValue.IMMUTABLE_CLASSES);
+
// Done
if (isDebugEnabled)
{
@@ -1874,6 +1887,10 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
else
{
Map props = getNodePropertiesCached(nodeId);
+ // Wrap to ensure that we only clone values if the client attempts to modify
+ // the map or retrieve values that might, themselves, be mutable
+ props = new ValueProtectingMap(props, NodePropertyValue.IMMUTABLE_CLASSES);
+ // The 'get' here will clone the value if it is mutable
value = props.get(propertyQName);
}
// Done
@@ -2118,8 +2135,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Map propsToCache = null;
if (isAddOnly)
{
+ // Copy cache properties for additions
+ propsToCache = new HashMap(oldPropsCached);
// Combine the old and new properties
- propsToCache = oldPropsCached;
propsToCache.putAll(propsToAdd);
}
else
@@ -2194,10 +2212,13 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
// Touch the node; all caches are fine
touchNode(nodeId, null, null, false, false, false);
- // Update cache
+ // Get cache props
Map cachedProps = getNodePropertiesCached(nodeId);
- cachedProps.keySet().removeAll(propertyQNames);
- setNodePropertiesCached(nodeId, cachedProps);
+ // Remove deleted properties
+ Map props = new HashMap(cachedProps);
+ props.keySet().removeAll(propertyQNames);
+ // Update cache
+ setNodePropertiesCached(nodeId, props);
}
// Done
return deleteCount > 0;
@@ -2250,7 +2271,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
/**
- * @return Returns a writable copy of the cached property map
+ * @return Returns the read-only cached property map
*/
private Map getNodePropertiesCached(Long nodeId)
{
@@ -2261,11 +2282,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
invalidateNodeCaches(nodeId);
throw new DataIntegrityViolationException("Invalid node ID: " + nodeId);
}
+ // We have the properties from the cache
Map cachedProperties = cacheEntry.getSecond();
- // Need to return a harmlessly mutable map
- Map properties = copyPropertiesAgainstModification(cachedProperties);
- // Done
- return properties;
+ return cachedProperties;
}
/**
@@ -2277,7 +2296,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
private void setNodePropertiesCached(Long nodeId, Map properties)
{
NodeVersionKey nodeVersionKey = getNodeNotNull(nodeId).getNodeVersionKey();
- properties = copyPropertiesAgainstModification(properties);
propertiesCache.setValue(nodeVersionKey, Collections.unmodifiableMap(properties));
}
@@ -2293,26 +2311,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
- /**
- * Shallow-copies to a new map except for maps and collections that are binary serialized
- */
- private Map copyPropertiesAgainstModification(Map original)
- {
- // Copy the values, ensuring that any collections are copied as well
- Map copy = new HashMap((int)(original.size() * 1.3));
- for (Map.Entry element : original.entrySet())
- {
- QName key = element.getKey();
- Serializable value = element.getValue();
- if (value instanceof Collection> || value instanceof Map, ?>)
- {
- value = (Serializable) SerializationUtils.deserialize(SerializationUtils.serialize(value));
- }
- copy.put(key, value);
- }
- return copy;
- }
-
/**
* Callback to cache node properties. The DAO callback only does the simple {@link #findByKey(Long)}.
*
@@ -2808,7 +2806,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
boolean isPrimary,
final QName assocTypeQName,
QName assocQName,
- final String childNodeName)
+ final String childNodeName,
+ boolean allowDeletedChild)
{
Assert.notNull(parentNodeId, "parentNodeId");
Assert.notNull(childNodeId, "childNodeId");
@@ -2818,7 +2817,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Get parent and child nodes. We need them later, so just get them now.
final Node parentNode = getNodeNotNull(parentNodeId);
- final Node childNode = getNodeNotNull(childNodeId);
+ final Node childNode = getNodeNotNullImpl(childNodeId, allowDeletedChild);
final ChildAssocEntity assoc = new ChildAssocEntity();
// Parent node
@@ -2905,7 +2904,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
ParentAssocsInfo parentAssocInfo = getParentAssocsCached(childNodeId);
// Create it
ChildAssocEntity assoc = newChildAssocImpl(
- parentNodeId, childNodeId, false, assocTypeQName, assocQName, childNodeName);
+ parentNodeId, childNodeId, false, assocTypeQName, assocQName, childNodeName, false);
Long assocId = assoc.getId();
// Touch the node; all caches are fine
touchNode(childNodeId, null, null, false, false, false);
@@ -3478,6 +3477,142 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
return paths;
}
+ private void bindFixAssocAndCollectLostAndFound(final Pair lostNodePair, final String lostName, final ChildAssocEntity assoc)
+ {
+ AlfrescoTransactionSupport.bindListener(new TransactionListenerAdapter()
+ {
+ @Override
+ public void afterRollback()
+ {
+ if (transactionService.getAllowWrite())
+ {
+ // New transaction
+ RetryingTransactionCallback callback = new RetryingTransactionCallback()
+ {
+ public Void execute() throws Throwable
+ {
+ if (assoc == null)
+ {
+ // 'child' with missing parent assoc => collect lost+found orphan child
+ collectLostAndFoundNode(lostNodePair, lostName);
+ logger.error("ALF-13066: Orphan child node has been re-homed under lost_found: "+lostNodePair);
+ }
+ else
+ {
+ // 'child' with deleted parent assoc => delete invalid parent assoc and if primary then collect lost+found orphan child
+ deleteChildAssoc(assoc.getId());
+ logger.error("ALF-12358: Deleted parent - removed child assoc: "+assoc.getId());
+
+ if (assoc.isPrimary())
+ {
+ collectLostAndFoundNode(lostNodePair, lostName);
+ logger.error("ALF-12358: Orphan child node has been re-homed under lost_found: "+lostNodePair);
+ }
+ }
+
+ return null;
+ }
+ };
+ transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);
+ }
+ }
+ });
+ }
+
+ private void collectLostAndFoundNode(Pair lostNodePair, String lostName)
+ {
+ Long childNodeId = lostNodePair.getFirst();
+ NodeRef lostNodeRef = lostNodePair.getSecond();
+
+ Long newParentNodeId = getOrCreateLostAndFoundContainer(lostNodeRef.getStoreRef()).getId();
+
+ String assocName = lostName+"-"+System.currentTimeMillis();
+ // Create new primary assoc (re-home the orphan node under lost_found)
+ ChildAssocEntity assoc = newChildAssocImpl(newParentNodeId,
+ childNodeId,
+ true,
+ ContentModel.ASSOC_CHILDREN,
+ QName.createQName(assocName),
+ assocName,
+ true);
+
+ // Touch the node; all caches are fine
+ touchNode(childNodeId, null, null, false, false, false);
+
+ // update cache
+ boolean isRoot = false;
+ boolean isStoreRoot = false;
+ ParentAssocsInfo parentAssocInfo = new ParentAssocsInfo(isRoot, isStoreRoot, assoc);
+ setParentAssocsCached(childNodeId, parentAssocInfo);
+
+ /*
+ // Update ACLs for moved tree - note: actually a NOOP if oldParentAclId is null
+ Long newParentAclId = newParentNode.getAclId();
+ Long oldParentAclId = null; // unknown
+ accessControlListDAO.updateInheritance(childNodeId, oldParentAclId, newParentAclId);
+ */
+ }
+
+ private Node getOrCreateLostAndFoundContainer(StoreRef storeRef)
+ {
+ Pair rootNodePair = getRootNode(storeRef);
+ Long rootParentNodeId = rootNodePair.getFirst();
+
+ final List> nodes = new ArrayList>(1);
+ NodeDAO.ChildAssocRefQueryCallback callback = new NodeDAO.ChildAssocRefQueryCallback()
+ {
+ public boolean handle(
+ Pair childAssocPair,
+ Pair parentNodePair,
+ Pair childNodePair
+ )
+ {
+ nodes.add(childNodePair);
+ // More results
+ return true;
+ }
+
+ @Override
+ public boolean preLoadNodes()
+ {
+ return false;
+ }
+
+ @Override
+ public boolean orderResults()
+ {
+ return false;
+ }
+
+ @Override
+ public void done()
+ {
+ }
+ };
+ Set assocTypeQNames = new HashSet(1);
+ assocTypeQNames.add(ContentModel.ASSOC_LOST_AND_FOUND);
+ getChildAssocs(rootParentNodeId, assocTypeQNames, callback);
+
+ Node lostFoundNode = null;
+ if (nodes.size() > 0)
+ {
+ lostFoundNode = getNodeNotNull(nodes.get(0).getFirst());
+
+ if (nodes.size() > 1)
+ {
+ logger.warn("More than one lost_found, using first: "+lostFoundNode.getNodeRef());
+ }
+ }
+ else
+ {
+ lostFoundNode = newNode(rootParentNodeId, ContentModel.ASSOC_LOST_AND_FOUND, ContentModel.ASSOC_LOST_AND_FOUND, storeRef, null, ContentModel.TYPE_LOST_AND_FOUND, Locale.US, ContentModel.ASSOC_LOST_AND_FOUND.getLocalName(), null).getChildNode();
+
+ logger.info("Created lost_found: "+lostFoundNode.getNodeRef());
+ }
+
+ return lostFoundNode;
+ }
+
/**
* Build the paths for a node
*
@@ -3516,9 +3651,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Pair rootNodePair = getRootNode(currentStoreRef);
currentRootNodePair = new Pair(currentStoreRef, rootNodePair.getSecond());
}
-
+
// get the parent associations of the given node
- ParentAssocsInfo parentAssocInfo = getParentAssocsCached(currentNodeId);
+ ParentAssocsInfo parentAssocInfo = getParentAssocsCached(currentNodeId); // note: currently may throw NotLiveNodeException
// does the node have parents
boolean hasParents = parentAssocInfo.getParentAssocs().size() > 0;
@@ -3583,8 +3718,12 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Force a retry. The cached node was stale
throw new DataIntegrityViolationException("Stale cache detected for Node #" + currentNodeId);
}
- // We have a corrupt repository
- throw new RuntimeException("Node without parents does not have root aspect: " + currentNodeRef);
+
+ // We have a corrupt repository - non-root node has a missing parent ?!
+ bindFixAssocAndCollectLostAndFound(currentNodePair, "nonRootNodeWithoutParents", null);
+
+ // throw - error will be logged and then bound txn listener (afterRollback) will be called
+ throw new NonRootNodeWithoutParentsException(currentNodePair);
}
// walk up each parent association
for (Map.Entry entry : parentAssocInfo.getParentAssocs().entrySet())
@@ -3631,10 +3770,25 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
" Prepending path parent: \n" +
" Parent node: " + parentNodePair);
}
-
+
// push the assoc stack, recurse and pop
assocIdStack.push(assocId);
- prependPaths(parentNodePair, currentRootNodePair, path, completedPaths, assocIdStack, primaryOnly);
+
+ try
+ {
+ prependPaths(parentNodePair, currentRootNodePair, path, completedPaths, assocIdStack, primaryOnly);
+ }
+ catch (final NotLiveNodeException re)
+ {
+ if (re.getNodePair().equals(parentNodePair))
+ {
+ // We have a corrupt repository - deleted parent pointing to live child ?!
+ bindFixAssocAndCollectLostAndFound(currentNodePair, "childNodeWithDeletedParent", assoc);
+ }
+ // rethrow - this will cause error/rollback
+ throw re;
+ }
+
assocIdStack.pop();
}
// done
diff --git a/source/java/org/alfresco/repo/domain/node/NodePropertyValue.java b/source/java/org/alfresco/repo/domain/node/NodePropertyValue.java
index 7f086c6d98..57cb084ac8 100644
--- a/source/java/org/alfresco/repo/domain/node/NodePropertyValue.java
+++ b/source/java/org/alfresco/repo/domain/node/NodePropertyValue.java
@@ -27,8 +27,10 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
+import java.util.Set;
import javax.crypto.SealedObject;
@@ -45,6 +47,7 @@ import org.alfresco.service.cmr.repository.Period;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.EqualsHelper;
+import org.alfresco.util.ValueProtectingMap;
import org.alfresco.util.VersionNumber;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -67,6 +70,31 @@ public class NodePropertyValue implements Cloneable, Serializable
private static Log logger = LogFactory.getLog(NodePropertyValue.class);
private static Log loggerOracle = LogFactory.getLog(NodePropertyValue.class.getName() + ".oracle");
+
+ /**
+ * Immutable classes in addition to {@link ValueProtectingMap#DEFAULT_IMMUTABLE_CLASSES}
+ *
ContentData
+ *
ContentDataId
+ *
NodeRef
+ *
ChildAssociationRef
+ *
AssociationRef
+ *
QName
+ *
VersionNumber
+ *
Period
+ */
+ public static final Set> IMMUTABLE_CLASSES;
+ static
+ {
+ IMMUTABLE_CLASSES = new HashSet>(13);
+ IMMUTABLE_CLASSES.add(ContentData.class);
+ IMMUTABLE_CLASSES.add(ContentDataId.class);
+ IMMUTABLE_CLASSES.add(NodeRef.class);
+ IMMUTABLE_CLASSES.add(ChildAssociationRef.class);
+ IMMUTABLE_CLASSES.add(AssociationRef.class);
+ IMMUTABLE_CLASSES.add(QName.class);
+ IMMUTABLE_CLASSES.add(VersionNumber.class);
+ IMMUTABLE_CLASSES.add(Period.class);
+ }
/** potential value types */
private static enum ValueType
diff --git a/source/java/org/alfresco/repo/domain/node/NonRootNodeWithoutParentsException.java b/source/java/org/alfresco/repo/domain/node/NonRootNodeWithoutParentsException.java
new file mode 100644
index 0000000000..f48d0924d0
--- /dev/null
+++ b/source/java/org/alfresco/repo/domain/node/NonRootNodeWithoutParentsException.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see .
+ */
+package org.alfresco.repo.domain.node;
+
+import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.util.Pair;
+import org.springframework.dao.ConcurrencyFailureException;
+
+/**
+ * For internal use only: see ALF-13066 / ALF-12358
+ */
+/* package */ class NonRootNodeWithoutParentsException extends ConcurrencyFailureException
+{
+ private static final long serialVersionUID = 5920138218201628243L;
+
+ private final Pair nodePair;
+
+ public NonRootNodeWithoutParentsException(Pair nodePair)
+ {
+ super("Node without parents does not have root aspect: " + nodePair);
+ this.nodePair = nodePair;
+ }
+
+ public Pair getNodePair()
+ {
+ return nodePair;
+ }
+}
diff --git a/source/java/org/alfresco/repo/domain/node/NotLiveNodeException.java b/source/java/org/alfresco/repo/domain/node/NotLiveNodeException.java
new file mode 100644
index 0000000000..a2825bb639
--- /dev/null
+++ b/source/java/org/alfresco/repo/domain/node/NotLiveNodeException.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see .
+ */
+package org.alfresco.repo.domain.node;
+
+import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.util.Pair;
+import org.springframework.dao.ConcurrencyFailureException;
+
+/**
+ * For internal use only: see ALF-13066 / ALF-12358
+ */
+/* package */ class NotLiveNodeException extends ConcurrencyFailureException
+{
+ private static final long serialVersionUID = 5920138218201628243L;
+
+ private final Pair nodePair;
+
+ public NotLiveNodeException(Pair nodePair)
+ {
+ super("Unexpected deleted node");
+ this.nodePair = nodePair;
+ }
+
+ public Pair getNodePair()
+ {
+ return new Pair(nodePair.getFirst(), nodePair.getSecond().getNodeRef());
+ }
+}
diff --git a/source/java/org/alfresco/repo/imap/ImapServiceImpl.java b/source/java/org/alfresco/repo/imap/ImapServiceImpl.java
index 4066e368c4..bf00960a82 100644
--- a/source/java/org/alfresco/repo/imap/ImapServiceImpl.java
+++ b/source/java/org/alfresco/repo/imap/ImapServiceImpl.java
@@ -966,11 +966,28 @@ public class ImapServiceImpl implements ImapService, OnCreateChildAssociationPol
private void setFlag(NodeRef nodeRef, Flag flag, boolean value)
{
checkForFlaggableAspect(nodeRef);
- AccessStatus status = permissionService.hasPermission(nodeRef, PermissionService.WRITE_PROPERTIES);
+
+ String permission = (flag == Flag.DELETED ? PermissionService.DELETE_NODE : PermissionService.WRITE_PROPERTIES);
+
+ AccessStatus status = permissionService.hasPermission(nodeRef, permission);
if (status == AccessStatus.DENIED)
{
- logger.debug("[setFlag] Access denied to add FLAG to " + nodeRef);
- //TODO should we throw an exception here?
+ if(flag == Flag.DELETED)
+ {
+ logger.debug("[setFlag] Access denied to set DELETED FLAG:" + nodeRef);
+ throw new AccessDeniedException("No permission to set DELETED flag");
+ }
+ if(flag == Flag.SEEN)
+ {
+ logger.debug("[setFlag] Access denied to set SEEN FLAG:" + nodeRef);
+ //TODO - should we throw an exception here?
+ //throw new AccessDeniedException("No permission to set DELETED flag");
+ }
+ else
+ {
+ logger.debug("[setFlag] Access denied to set flag:" + nodeRef);
+ throw new AccessDeniedException("No permission to set flag:" + flag.toString());
+ }
}
else
{
diff --git a/source/java/org/alfresco/repo/jscript/ScriptNode.java b/source/java/org/alfresco/repo/jscript/ScriptNode.java
index c5d3a9c830..10b468503f 100644
--- a/source/java/org/alfresco/repo/jscript/ScriptNode.java
+++ b/source/java/org/alfresco/repo/jscript/ScriptNode.java
@@ -2733,6 +2733,11 @@ public class ScriptNode implements Scopeable, NamespacePrefixResolverProvider
String nodeMimeType = getMimetype();
Serializable value = this.nodeService.getProperty(nodeRef, ContentModel.PROP_CONTENT);
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
+ if (contentData == null)
+ {
+ logger.info("Unable to create thumbnail '" + details.getName() + "' as there is no content");
+ return null;
+ }
if (!registry.isThumbnailDefinitionAvailable(contentData.getContentUrl(), nodeMimeType, getSize(), details))
{
logger.info("Unable to create thumbnail '" + details.getName() + "' for " +
diff --git a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
index 3a5b73bbec..6446add82c 100644
--- a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
+++ b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImpl.java
@@ -486,9 +486,9 @@ public class FileFolderServiceImpl implements FileFolderService
// get canned query
GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject(CANNED_QUERY_FILEFOLDER_LIST);
-
- GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(contextNodeRef, pattern, searchTypeQNames, null, sortProps, pagingRequest);
-
+
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(contextNodeRef, pattern, Collections.singleton(ContentModel.ASSOC_CONTAINS), searchTypeQNames, null, sortProps, pagingRequest);
+
// execute canned query
CannedQueryResults results = cq.execute();
diff --git a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
index 13bfe8c094..a75047da18 100644
--- a/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
+++ b/source/java/org/alfresco/repo/model/filefolder/FileFolderServiceImplTest.java
@@ -38,6 +38,7 @@ import org.alfresco.model.ForumModel;
import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.content.MimetypeMap;
+import org.alfresco.repo.dictionary.DictionaryBootstrap;
import org.alfresco.repo.dictionary.DictionaryDAO;
import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.dictionary.M2Type;
@@ -46,6 +47,7 @@ import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.security.permissions.AccessDeniedException;
+import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.model.FileExistsException;
import org.alfresco.service.cmr.model.FileFolderService;
@@ -100,11 +102,13 @@ public class FileFolderServiceImplTest extends TestCase
private NodeService nodeService;
private FileFolderService fileFolderService;
private PermissionService permissionService;
+ private TenantService tenantService;
private MutableAuthenticationService authenticationService;
private DictionaryDAO dictionaryDAO;
private UserTransaction txn;
private NodeRef rootNodeRef;
private NodeRef workingRootNodeRef;
+ private NodeRef workingRootNodeRef1;
@Override
public void setUp() throws Exception
@@ -116,6 +120,7 @@ public class FileFolderServiceImplTest extends TestCase
permissionService = serviceRegistry.getPermissionService();
authenticationService = (MutableAuthenticationService) ctx.getBean("AuthenticationService");
dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
+ tenantService = (TenantService) ctx.getBean("tenantService");
// start the transaction
txn = transactionService.getUserTransaction();
@@ -149,6 +154,33 @@ public class FileFolderServiceImplTest extends TestCase
}
Reader reader = new InputStreamReader(is);
importerService.importView(reader, importLocation, null, null);
+
+ // Load test model
+ DictionaryBootstrap bootstrap = new DictionaryBootstrap();
+ List bootstrapModels = new ArrayList();
+ bootstrapModels.add("org/alfresco/repo/model/filefolder/testModel.xml");
+ List labels = new ArrayList();
+ bootstrap.setModels(bootstrapModels);
+ bootstrap.setLabels(labels);
+ bootstrap.setDictionaryDAO(dictionaryDAO);
+ bootstrap.setTenantService(tenantService);
+ bootstrap.bootstrap();
+
+ workingRootNodeRef1 = nodeService.createNode(
+ rootNodeRef,
+ ContentModel.ASSOC_CHILDREN,
+ QName.createQName(NamespaceService.ALFRESCO_URI, "working root1"),
+ QName.createQName("http://www.alfresco.org/test/filefoldertest/1.0", "folder")).getChildRef();
+ nodeService.createNode(
+ workingRootNodeRef1,
+ ContentModel.ASSOC_CONTAINS,
+ QName.createQName(NamespaceService.ALFRESCO_URI, "node1"),
+ ContentModel.TYPE_CONTENT).getChildRef();
+ nodeService.createNode(
+ workingRootNodeRef1,
+ QName.createQName("http://www.alfresco.org/test/filefoldertest/1.0", "contains1"),
+ QName.createQName(NamespaceService.ALFRESCO_URI, "node2"),
+ ContentModel.TYPE_CONTENT).getChildRef();
}
public void tearDown() throws Exception
@@ -1327,4 +1359,14 @@ public class FileFolderServiceImplTest extends TestCase
checkFileList(files, 2, 0, expectedNames);
}
+
+ public void testALF12758()
+ {
+ // test that the FileFolderService returns only cm:contains children
+ PagingRequest pagingRequest = new PagingRequest(0, Integer.MAX_VALUE);
+ PagingResults pagingResults = fileFolderService.list(workingRootNodeRef1, true, true, null, null, null, pagingRequest);
+ assertNotNull(pagingResults);
+ assertNotNull(pagingResults.getPage());
+ assertEquals(1, pagingResults.getPage().size());
+ }
}
diff --git a/source/java/org/alfresco/repo/model/filefolder/testModel.xml b/source/java/org/alfresco/repo/model/filefolder/testModel.xml
new file mode 100644
index 0000000000..b0f63594b7
--- /dev/null
+++ b/source/java/org/alfresco/repo/model/filefolder/testModel.xml
@@ -0,0 +1,51 @@
+
+
+ Alfresco Content Model
+ Alfresco
+ 2012-02-23
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Test Folder
+ cm:folder
+ true
+
+
+
+ false
+ true
+
+
+ sys:base
+ false
+ true
+
+ false
+ true
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/source/java/org/alfresco/repo/node/NodeServiceTest.java b/source/java/org/alfresco/repo/node/NodeServiceTest.java
index 8b57bb6255..6828ffd175 100644
--- a/source/java/org/alfresco/repo/node/NodeServiceTest.java
+++ b/source/java/org/alfresco/repo/node/NodeServiceTest.java
@@ -22,12 +22,15 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.io.Serializable;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Random;
import java.util.Set;
import junit.framework.TestCase;
@@ -35,8 +38,11 @@ import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.domain.node.Node;
+import org.alfresco.repo.domain.node.NodeDAO;
+import org.alfresco.repo.domain.node.NodeEntity;
import org.alfresco.repo.domain.node.NodeVersionKey;
import org.alfresco.repo.domain.node.ParentAssocsInfo;
+import org.alfresco.repo.domain.query.CannedQueryDAO;
import org.alfresco.repo.node.NodeServicePolicies.BeforeCreateNodePolicy;
import org.alfresco.repo.node.NodeServicePolicies.BeforeSetNodeTypePolicy;
import org.alfresco.repo.node.NodeServicePolicies.BeforeUpdateNodePolicy;
@@ -66,7 +72,10 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
+import org.alfresco.util.Pair;
import org.alfresco.util.PropertyMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.extensions.surf.util.I18NUtil;
@@ -86,10 +95,13 @@ public class NodeServiceTest extends TestCase
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
+ private static Log logger = LogFactory.getLog(NodeServiceTest.class);
+
protected ServiceRegistry serviceRegistry;
protected NodeService nodeService;
private TransactionService txnService;
private PolicyComponent policyComponent;
+ private CannedQueryDAO cannedQueryDAO;
private SimpleCache nodesCache;
private SimpleCache propsCache;
private SimpleCache aspectsCache;
@@ -108,6 +120,7 @@ public class NodeServiceTest extends TestCase
nodeService = serviceRegistry.getNodeService();
txnService = serviceRegistry.getTransactionService();
policyComponent = (PolicyComponent) ctx.getBean("policyComponent");
+ cannedQueryDAO = (CannedQueryDAO) ctx.getBean("cannedQueryDAO");
// Get the caches for later testing
nodesCache = (SimpleCache) ctx.getBean("node.nodesSharedCache");
@@ -306,11 +319,13 @@ public class NodeServiceTest extends TestCase
/**
* Tests that two separate node trees can be deleted concurrently at the database level.
- * This is not a concurren thread issue; instead we delete a hierarchy and hold the txn
+ * This is not a concurrent thread issue; instead we delete a hierarchy and hold the txn
* open while we delete another in a new txn, thereby testing that DB locks don't prevent
* concurrent deletes.
*
* See: ALF-5714
+ *
+ * Note: if this test hangs for MySQL then check if 'innodb_locks_unsafe_for_binlog = true' (and restart MySQL + test)
*/
public void testConcurrentArchive() throws Exception
{
@@ -1039,4 +1054,296 @@ public class NodeServiceTest extends TestCase
new JavaBehaviour(policy, policyQName.getLocalName()));
return policy;
}
+
+ /**
+ * Ensure that nodes cannot be linked to deleted nodes.
+ *
+ * Conditions that might cause this are:
+ *
+ *
Node created within a parent node that is being deleted
+ *
The node cache is temporarily incorrect when the association is made
+ *
+ *
+ * Concurrency: Possible to create association references to deleted nodes
+ */
+ public void testConcurrentLinkToDeletedNode() throws Throwable
+ {
+ // First find any broken links to start with
+ final NodeEntity params = new NodeEntity();
+ params.setId(0L);
+ params.setDeleted(true);
+
+ List ids = getChildNodesWithDeletedParentNode(params, 0);
+ logger.debug("Found child nodes with deleted parent node (before): " + ids);
+
+ final int idsToSkip = ids.size();
+
+ final NodeRef[] nodeRefs = new NodeRef[10];
+ final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
+ buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
+
+ // Fire off a bunch of threads that create random nodes within the hierarchy created above
+ final RetryingTransactionCallback createChildCallback = new RetryingTransactionCallback()
+ {
+ @Override
+ public NodeRef execute() throws Throwable
+ {
+ String randomName = getName() + "-" + System.nanoTime();
+ QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
+ Map props = new HashMap();
+ props.put(ContentModel.PROP_NAME, randomName);
+ // Choose a random parent node from the hierarchy
+ int random = new Random().nextInt(10);
+ return nodeService.createNode(
+ nodeRefs[random],
+ ContentModel.ASSOC_CONTAINS,
+ randomQName,
+ ContentModel.TYPE_CONTAINER,
+ props).getChildRef();
+ }
+ };
+ final Runnable[] runnables = new Runnable[20];
+ final List nodesAtRisk = Collections.synchronizedList(new ArrayList(100));
+
+ final List threads = new ArrayList();
+ for (int i = 0; i < runnables.length; i++)
+ {
+ runnables[i] = new Runnable()
+ {
+ @Override
+ public synchronized void run()
+ {
+ AuthenticationUtil.setRunAsUserSystem();
+ try
+ {
+ wait(1000L); // A short wait before we kick off (should be notified)
+ for (int i = 0; i < 200; i++)
+ {
+ NodeRef nodeRef = txnService.getRetryingTransactionHelper().doInTransaction(createChildCallback);
+ // Store the node for later checks
+ nodesAtRisk.add(nodeRef);
+ // Wait to give other threads a chance
+ wait(1L);
+ }
+ }
+ catch (Throwable e)
+ {
+ // This is expected i.e. we'll just keep doing it until failure
+ logger.debug("Got exception adding child node: ", e);
+ }
+ }
+ };
+ Thread thread = new Thread(runnables[i]);
+ threads.add(thread);
+ thread.start();
+ }
+
+ final RetryingTransactionCallback deleteWithNestedCallback = new RetryingTransactionCallback()
+ {
+ @Override
+ public NodeRef execute() throws Throwable
+ {
+ // Notify the threads to kick off
+ for (int i = 0; i < runnables.length; i++)
+ {
+ // Notify the threads to stop waiting
+ synchronized(runnables[i])
+ {
+ runnables[i].notify();
+ }
+ // Short wait to give thread a chance to run
+ synchronized(this) { try { wait(10L); } catch (Throwable e) {} };
+ }
+ // Delete the parent node
+ nodeService.deleteNode(nodeRefs[0]);
+ return null;
+ }
+ };
+ txnService.getRetryingTransactionHelper().doInTransaction(deleteWithNestedCallback);
+
+ // Wait for the threads to finish
+ for (Thread t : threads)
+ {
+ t.join();
+ }
+
+ logger.info("All threads should have finished");
+
+ // Now need to identify the problem nodes
+ final List childNodeIds = getChildNodesWithDeletedParentNode(params, idsToSkip);
+
+ if (childNodeIds.isEmpty())
+ {
+ // nothing more to test
+ return;
+ }
+
+ logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
+
+ // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
+ for (NodeRef nodeRef : nodesAtRisk)
+ {
+ if (nodeService.exists(nodeRef))
+ {
+ nodeService.getPath(nodeRef); // ignore return
+ }
+ }
+
+ // check again ...
+ ids = getChildNodesWithDeletedParentNode(params, idsToSkip);
+ assertTrue("The following child nodes have deleted parent node: " + ids, ids.isEmpty());
+
+ // check lost_found ...
+ List lostAndFoundNodeRefs = getLostAndFoundNodes();
+ assertFalse(lostAndFoundNodeRefs.isEmpty());
+
+ List lostAndFoundNodeIds = new ArrayList(lostAndFoundNodeRefs.size());
+ for (NodeRef nodeRef : lostAndFoundNodeRefs)
+ {
+ lostAndFoundNodeIds.add((Long)nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
+ }
+
+ for (Long childNodeId : childNodeIds)
+ {
+ assertTrue("Not found: "+childNodeId, lostAndFoundNodeIds.contains(childNodeId));
+ }
+ }
+
+ /**
+ * Pending repeatable test - force issue ALF-13066 (non-root node with no parent)
+ */
+ public void testForceNonRootNodeWithNoParentNode() throws Throwable
+ {
+ final NodeEntity params = new NodeEntity();
+ params.setId(0L);
+ params.setDeleted(true);
+
+ List ids = getChildNodesWithNoParentNode(params, 0);
+ logger.debug("Found child nodes with deleted parent node (before): " + ids);
+
+ final int idsToSkip = ids.size();
+
+ final NodeRef[] nodeRefs = new NodeRef[10];
+ final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
+ buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
+
+ int cnt = 5;
+ List childNodeRefs = new ArrayList(cnt);
+
+ final NodeDAO nodeDAO = (NodeDAO)ctx.getBean("nodeDAO");
+
+ for (int i = 0; i < cnt; i++)
+ {
+ // create some pseudo-thumbnails
+ String randomName = getName() + "-" + System.nanoTime();
+ QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
+ Map props = new HashMap();
+ props.put(ContentModel.PROP_NAME, randomName);
+
+ // Choose a random parent node from the hierarchy
+ int random = new Random().nextInt(10);
+ NodeRef parentNodeRef = nodeRefs[random];
+
+ NodeRef childNodeRef = nodeService.createNode(
+ parentNodeRef,
+ ContentModel.ASSOC_CONTAINS,
+ randomQName,
+ ContentModel.TYPE_THUMBNAIL,
+ props).getChildRef();
+
+ childNodeRefs.add(childNodeRef);
+
+ // forcefully remove the primary parent assoc
+ final Long childNodeId = (Long)nodeService.getProperty(childNodeRef, ContentModel.PROP_NODE_DBID);
+ txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback()
+ {
+ @Override
+ public Void execute() throws Throwable
+ {
+ Pair assocPair = nodeDAO.getPrimaryParentAssoc(childNodeId);
+ nodeDAO.deleteChildAssoc(assocPair.getFirst());
+ return null;
+ }
+ });
+ }
+
+ // Now need to identify the problem nodes
+ final List childNodeIds = getChildNodesWithNoParentNode(params, idsToSkip);
+ assertFalse(childNodeIds.isEmpty());
+ logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
+
+ // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
+ for (NodeRef nodeRef : childNodeRefs)
+ {
+ if (nodeService.exists(nodeRef))
+ {
+ nodeService.getPath(nodeRef); // ignore return
+ }
+ }
+
+ // check again ...
+ ids = getChildNodesWithNoParentNode(params, idsToSkip);
+ assertTrue("The following child nodes have no parent node: " + ids, ids.isEmpty());
+
+ // check lost_found ...
+ List lostAndFoundNodeRefs = getLostAndFoundNodes();
+ assertFalse(lostAndFoundNodeRefs.isEmpty());
+
+ List lostAndFoundNodeIds = new ArrayList(lostAndFoundNodeRefs.size());
+ for (NodeRef nodeRef : lostAndFoundNodeRefs)
+ {
+ lostAndFoundNodeIds.add((Long)nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
+ }
+
+ for (Long childNodeId : childNodeIds)
+ {
+ assertTrue("Not found: "+childNodeId, lostAndFoundNodeIds.contains(childNodeId));
+ }
+ }
+
+ private List getChildNodesWithDeletedParentNode(NodeEntity params, int idsToSkip)
+ {
+ return cannedQueryDAO.executeQuery(
+ "alfresco.query.test",
+ "select_NodeServiceTest_testConcurrentLinkToDeletedNode_GetChildNodesWithDeletedParentNodeCannedQuery",
+ params,
+ idsToSkip,
+ Integer.MAX_VALUE);
+ }
+
+ private List getChildNodesWithNoParentNode(NodeEntity params, int idsToSkip)
+ {
+ return cannedQueryDAO.executeQuery(
+ "alfresco.query.test",
+ "select_NodeServiceTest_testForceNonRootNodeWithNoParentNode_GetChildNodesWithNoParentNodeCannedQuery",
+ params,
+ idsToSkip,
+ Integer.MAX_VALUE);
+ }
+
+ private List getLostAndFoundNodes()
+ {
+ Set childNodeTypeQNames = new HashSet(1);
+ childNodeTypeQNames.add(ContentModel.TYPE_LOST_AND_FOUND);
+
+ List childAssocRefs = nodeService.getChildAssocs(nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE), childNodeTypeQNames);
+
+ List lostNodeRefs = null;
+
+ if (childAssocRefs.size() > 0)
+ {
+ List lostNodeChildAssocRefs = nodeService.getChildAssocs(childAssocRefs.get(0).getChildRef());
+ lostNodeRefs = new ArrayList(lostNodeChildAssocRefs.size());
+ for(ChildAssociationRef lostNodeChildAssocRef : lostNodeChildAssocRefs)
+ {
+ lostNodeRefs.add(lostNodeChildAssocRef.getChildRef());
+ }
+ }
+ else
+ {
+ lostNodeRefs = Collections.emptyList();
+ }
+
+ return lostNodeRefs;
+ }
}
diff --git a/source/java/org/alfresco/repo/node/getchildren/FilterSortNodeEntity.java b/source/java/org/alfresco/repo/node/getchildren/FilterSortNodeEntity.java
index f535b111ab..dad56b5798 100644
--- a/source/java/org/alfresco/repo/node/getchildren/FilterSortNodeEntity.java
+++ b/source/java/org/alfresco/repo/node/getchildren/FilterSortNodeEntity.java
@@ -19,6 +19,7 @@
package org.alfresco.repo.node.getchildren;
import java.util.List;
+import java.util.Set;
import org.alfresco.repo.domain.node.NodeEntity;
import org.alfresco.repo.domain.node.NodePropertyEntity;
@@ -46,6 +47,7 @@ public class FilterSortNodeEntity
private Long prop2qnameId;
private Long prop3qnameId;
private List childNodeTypeQNameIds;
+ private Set assocTypeQNameIds;
private String pattern;
private Long namePropertyQNameId;
private boolean auditableProps;
@@ -106,6 +108,16 @@ public class FilterSortNodeEntity
}
}
+ public void setAssocTypeQNameIds(Set assocTypeQNameIds)
+ {
+ this.assocTypeQNameIds = assocTypeQNameIds;
+ }
+
+ public Set getAssocTypeQNameIds()
+ {
+ return assocTypeQNameIds;
+ }
+
public Long getNamePropertyQNameId()
{
return namePropertyQNameId;
diff --git a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQuery.java b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQuery.java
index fe31d14c81..5f1bc98a5d 100644
--- a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQuery.java
+++ b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQuery.java
@@ -151,6 +151,7 @@ public class GetChildrenCannedQuery extends AbstractCannedQueryPermissions childNodeTypeQNames = paramBean.getChildTypeQNames();
+ Set assocTypeQNames = paramBean.getAssocTypeQNames();
final List filterProps = paramBean.getFilterProps();
String pattern = paramBean.getPattern();
@@ -158,7 +159,7 @@ public class GetChildrenCannedQuery extends AbstractCannedQueryPermissions> sortPairs = (List)sortDetails.getSortPairs();
-
+
// Set sort / filter params
// Note - need to keep the sort properties in their requested order
List sortFilterProps = new ArrayList(filterProps.size() + sortPairs.size());
@@ -199,6 +200,15 @@ public class GetChildrenCannedQuery extends AbstractCannedQueryPermissions assocTypeQNameIds = qnameDAO.convertQNamesToIds(assocTypeQNames, false);
+ if (assocTypeQNameIds.size() > 0)
+ {
+ params.setAssocTypeQNameIds(assocTypeQNameIds);
+ }
+ }
+
if (pattern != null)
{
// TODO, check that we should be tied to the content model in this way. Perhaps a configurable property
diff --git a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryFactory.java b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryFactory.java
index fc11f8ed4c..09c51ec5ef 100644
--- a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryFactory.java
+++ b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryFactory.java
@@ -117,6 +117,7 @@ public class GetChildrenCannedQueryFactory extends AbstractCannedQueryFactory getCannedQuery(NodeRef parentRef, String pattern, Set childTypeQNames, List filterProps, List> sortProps, PagingRequest pagingRequest)
+ public CannedQuery getCannedQuery(NodeRef parentRef, String pattern, Set assocTypeQNames, Set childTypeQNames, List filterProps, List> sortProps, PagingRequest pagingRequest)
{
ParameterCheck.mandatory("parentRef", parentRef);
ParameterCheck.mandatory("pagingRequest", pagingRequest);
@@ -132,7 +133,7 @@ public class GetChildrenCannedQueryFactory extends AbstractCannedQueryFactory getCannedQuery(NodeRef parentRef, String pattern,Set childTypeQNames, PagingRequest pagingRequest)
+ public CannedQuery getCannedQuery(NodeRef parentRef, String pattern, Set assocTypeQNames, Set childTypeQNames, PagingRequest pagingRequest)
{
- return getCannedQuery(parentRef, pattern, childTypeQNames, null, null, pagingRequest);
+ return getCannedQuery(parentRef, pattern, assocTypeQNames, childTypeQNames, null, null, pagingRequest);
}
@Override
diff --git a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryParams.java b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryParams.java
index 4d47520f01..81243c9748 100644
--- a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryParams.java
+++ b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryParams.java
@@ -37,15 +37,18 @@ public class GetChildrenCannedQueryParams
private Set childTypeQNames = Collections.emptySet();
private List filterProps = Collections.emptyList();
+ private Set assocTypeQNames = null;
private String pattern = null;
public GetChildrenCannedQueryParams(
NodeRef parentRef,
+ Set assocTypeQNames,
Set childTypeQNames,
List filterProps,
String pattern)
{
this.parentRef = parentRef;
+ this.assocTypeQNames = assocTypeQNames;
if (childTypeQNames != null) { this.childTypeQNames = childTypeQNames; }
if (filterProps != null) { this.filterProps = filterProps; }
@@ -65,7 +68,12 @@ public class GetChildrenCannedQueryParams
return childTypeQNames;
}
- public List getFilterProps()
+ public Set getAssocTypeQNames()
+ {
+ return assocTypeQNames;
+ }
+
+ public List getFilterProps()
{
return filterProps;
}
diff --git a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryTest.java b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryTest.java
index de770f13a1..87c7072826 100644
--- a/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryTest.java
+++ b/source/java/org/alfresco/repo/node/getchildren/GetChildrenCannedQueryTest.java
@@ -40,6 +40,8 @@ import org.alfresco.query.CannedQueryFactory;
import org.alfresco.query.CannedQueryResults;
import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
+import org.alfresco.repo.dictionary.DictionaryBootstrap;
+import org.alfresco.repo.dictionary.DictionaryDAO;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.locale.LocaleDAO;
import org.alfresco.repo.domain.node.NodeDAO;
@@ -94,6 +96,8 @@ public class GetChildrenCannedQueryTest extends TestCase
private MutableAuthenticationService authenticationService;
private PermissionService permissionService;
private RatingService ratingService;
+ private TenantService tenantService;
+ private DictionaryDAO dictionaryDAO;
private RatingScheme fiveStarRatingScheme;
private RatingScheme likesRatingScheme;
@@ -113,6 +117,8 @@ public class GetChildrenCannedQueryTest extends TestCase
private Set permHits = new HashSet(100);
private Set permMisses = new HashSet(100);
+ private NodeRef testFolder;
+
@SuppressWarnings({ "rawtypes" })
private NamedObjectRegistry cannedQueryRegistry;
@@ -130,6 +136,9 @@ public class GetChildrenCannedQueryTest extends TestCase
authenticationService = (MutableAuthenticationService)ctx.getBean("AuthenticationService");
permissionService = (PermissionService)ctx.getBean("PermissionService");
ratingService = (RatingService)ctx.getBean("RatingService");
+
+ dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
+ tenantService = (TenantService) ctx.getBean("tenantService");
cannedQueryRegistry = new NamedObjectRegistry();
cannedQueryRegistry.setStorageType(CannedQueryFactory.class);
@@ -160,6 +169,17 @@ public class GetChildrenCannedQueryTest extends TestCase
{
AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
+ // Load test model
+ DictionaryBootstrap bootstrap = new DictionaryBootstrap();
+ List bootstrapModels = new ArrayList();
+ bootstrapModels.add("org/alfresco/repo/node/getchildren/testModel.xml");
+ List labels = new ArrayList();
+ bootstrap.setModels(bootstrapModels);
+ bootstrap.setLabels(labels);
+ bootstrap.setDictionaryDAO(dictionaryDAO);
+ bootstrap.setTenantService(tenantService);
+ bootstrap.bootstrap();
+
createUser(TEST_USER_PREFIX, TEST_USER, TEST_USER);
createUser(TEST_USER_PREFIX+"aaaa", TEST_USER_PREFIX+"bbbb", TEST_USER_PREFIX+"cccc");
@@ -221,7 +241,12 @@ public class GetChildrenCannedQueryTest extends TestCase
assertTrue(permissionService.hasPermission(nodeRef, PermissionService.READ) == AccessStatus.ALLOWED);
}
}
-
+
+ AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
+ testFolder = createFolder(repositoryHelper.getCompanyHome(), "testFolder1", QName.createQName("http://www.alfresco.org/test/getchildrentest/1.0", "folder"));
+ createContent(testFolder, "textContent1", ContentModel.TYPE_CONTENT);
+ createContent(testFolder, QName.createQName("http://www.alfresco.org/test/getchildrentest/1.0", "contains1"), "textContent2", ContentModel.TYPE_CONTENT);
+
AuthenticationUtil.setFullyAuthenticatedUser(TEST_USER);
}
@@ -689,6 +714,34 @@ public class GetChildrenCannedQueryTest extends TestCase
assertEquals("", 0, totalCnt);
}
+ public void testRestrictByAssocType()
+ {
+ Set assocTypeQNames = new HashSet(3);
+ Set childTypeQNames = new HashSet(3);
+
+ assocTypeQNames.clear();
+ assocTypeQNames.add(ContentModel.ASSOC_CONTAINS);
+ childTypeQNames.clear();
+ childTypeQNames.add(ContentModel.TYPE_CONTENT);
+ List children = filterByAssocTypeAndCheck(testFolder, assocTypeQNames, childTypeQNames);
+ assertEquals(1, children.size());
+
+ assocTypeQNames.clear();
+ assocTypeQNames.add(QName.createQName("http://www.alfresco.org/test/getchildrentest/1.0", "contains1"));
+ childTypeQNames.clear();
+ childTypeQNames.add(ContentModel.TYPE_CONTENT);
+ children = filterByAssocTypeAndCheck(testFolder, assocTypeQNames, childTypeQNames);
+ assertEquals(1, children.size());
+
+ assocTypeQNames.clear();
+ assocTypeQNames.add(QName.createQName("http://www.alfresco.org/test/getchildrentest/1.0", "contains1"));
+ assocTypeQNames.add(ContentModel.ASSOC_CONTAINS);
+ childTypeQNames.clear();
+ childTypeQNames.add(ContentModel.TYPE_CONTENT);
+ children = filterByAssocTypeAndCheck(testFolder, assocTypeQNames, childTypeQNames);
+ assertEquals(2, children.size());
+ }
+
// test helper method - optional filtering/sorting
private PagingResults list(NodeRef parentNodeRef, final int skipCount, final int maxItems, final int requestTotalCountMax, String pattern, List> sortProps)
{
@@ -697,7 +750,7 @@ public class GetChildrenCannedQueryTest extends TestCase
// get canned query
GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject("getChildrenCannedQueryFactory");
- GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(parentNodeRef, pattern, null, null, sortProps, pagingRequest);
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(parentNodeRef, pattern, null, null, null, sortProps, pagingRequest);
// execute canned query
CannedQueryResults results = cq.execute();
@@ -748,6 +801,15 @@ public class GetChildrenCannedQueryTest extends TestCase
}
}
+ private List filterByAssocTypeAndCheck(NodeRef parentNodeRef, Set assocTypeQNames, Set childTypeQNames)
+ {
+ PagingResults results = list(parentNodeRef, -1, -1, 0, assocTypeQNames, childTypeQNames, null, null);
+ assertTrue(results.getPage().size() > 0);
+
+ List childNodeRefs = results.getPage();
+ return childNodeRefs;
+ }
+
private void filterByPropAndCheck(NodeRef parentNodeRef, QName filterPropQName, String filterVal, FilterTypeString filterType, int expectedCount)
{
FilterProp filter = new FilterPropString(filterPropQName, filterVal, filterType);
@@ -955,7 +1017,31 @@ public class GetChildrenCannedQueryTest extends TestCase
// get canned query
GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject("getChildrenCannedQueryFactory");
- GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(parentNodeRef, null, childTypeQNames, filterProps, sortProps, pagingRequest);
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(parentNodeRef, null, null, childTypeQNames, filterProps, sortProps, pagingRequest);
+
+ // execute canned query
+ CannedQueryResults results = cq.execute();
+
+ List nodeRefs = results.getPages().get(0);
+
+ Integer totalCount = null;
+ if (requestTotalCountMax > 0)
+ {
+ totalCount = results.getTotalResultCount().getFirst();
+ }
+
+ return new PagingNodeRefResultsImpl(nodeRefs, results.hasMoreItems(), totalCount, false);
+ }
+
+ // test helper method - optional filtering/sorting
+ private PagingResults list(NodeRef parentNodeRef, final int skipCount, final int maxItems, final int requestTotalCountMax, Set assocTypeQNames, Set childTypeQNames, List filterProps, List> sortProps)
+ {
+ PagingRequest pagingRequest = new PagingRequest(skipCount, maxItems, null);
+ pagingRequest.setRequestTotalCountMax(requestTotalCountMax);
+
+ // get canned query
+ GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject("getChildrenCannedQueryFactory");
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(parentNodeRef, null, assocTypeQNames, childTypeQNames, filterProps, sortProps, pagingRequest);
// execute canned query
CannedQueryResults results = cq.execute();
@@ -1009,7 +1095,7 @@ public class GetChildrenCannedQueryTest extends TestCase
}
}
- private void createFolder(NodeRef parentNodeRef, String folderName, QName folderType) throws IOException
+ private NodeRef createFolder(NodeRef parentNodeRef, String folderName, QName folderType) throws IOException
{
Map properties = new HashMap();
properties.put(ContentModel.PROP_NAME, folderName);
@@ -1025,6 +1111,7 @@ public class GetChildrenCannedQueryTest extends TestCase
QName.createQName(folderName),
folderType,
properties).getChildRef();
+ return nodeRef;
}
private NodeRef createContent(NodeRef parentNodeRef, String fileName, QName contentType) throws IOException
@@ -1053,6 +1140,32 @@ public class GetChildrenCannedQueryTest extends TestCase
return nodeRef;
}
+ private NodeRef createContent(NodeRef parentNodeRef, QName childAssocType, String fileName, QName contentType) throws IOException
+ {
+ Map properties = new HashMap();
+ properties.put(ContentModel.PROP_NAME, fileName);
+ properties.put(ContentModel.PROP_TITLE, fileName+" my title");
+ properties.put(ContentModel.PROP_DESCRIPTION, fileName+" my description");
+
+ NodeRef nodeRef = nodeService.getChildByName(parentNodeRef, childAssocType, fileName);
+ if (nodeRef != null)
+ {
+ nodeService.deleteNode(nodeRef);
+ }
+
+ nodeRef = nodeService.createNode(parentNodeRef,
+ childAssocType,
+ QName.createQName(fileName),
+ contentType,
+ properties).getChildRef();
+
+ ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
+ writer.setMimetype(mimetypeService.guessMimetype(fileName));
+ writer.putContent("my text content");
+
+ return nodeRef;
+ }
+
private void loadContent(NodeRef parentNodeRef, String inFileName, String title, String description, boolean readAllowed, Set results) throws IOException
{
String newFileName = TEST_FILE_PREFIX + inFileName;
diff --git a/source/java/org/alfresco/repo/node/getchildren/testModel.xml b/source/java/org/alfresco/repo/node/getchildren/testModel.xml
new file mode 100644
index 0000000000..e981d8d6bf
--- /dev/null
+++ b/source/java/org/alfresco/repo/node/getchildren/testModel.xml
@@ -0,0 +1,51 @@
+
+
+ Alfresco Content Model
+ Alfresco
+ 2012-02-23
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Test Folder
+ cm:folder
+ true
+
+
+
+ false
+ true
+
+
+ sys:base
+ false
+ true
+
+ false
+ true
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/source/java/org/alfresco/repo/rendition/executer/AbstractTransformationRenderingEngine.java b/source/java/org/alfresco/repo/rendition/executer/AbstractTransformationRenderingEngine.java
index a82db6d390..279d7e16b9 100644
--- a/source/java/org/alfresco/repo/rendition/executer/AbstractTransformationRenderingEngine.java
+++ b/source/java/org/alfresco/repo/rendition/executer/AbstractTransformationRenderingEngine.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2011 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -86,6 +86,7 @@ public abstract class AbstractTransformationRenderingEngine extends AbstractRend
protected void render(RenderingContext context)
{
ContentReader contentReader = context.makeContentReader();
+ // There will have been an exception if there is no content data so contentReader is not null.
String sourceUrl = contentReader.getContentUrl();
String sourceMimeType = contentReader.getMimetype();
String targetMimeType = getTargetMimeType(context);
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
index a048659133..9c48ebc522 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2011 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -1237,9 +1237,9 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
// Content is always tokenised
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, serializableValue);
- if (!index || contentData.getMimetype() == null)
+ if (!index || contentData == null || contentData.getMimetype() == null)
{
- // no mimetype or property not indexed
+ // no content, mimetype or property not indexed
continue;
}
// store mimetype in index - even if content does not index it is useful
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
index c9b6c6482d..e8511695b8 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
@@ -1,5 +1,5 @@
/**
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -1096,9 +1096,9 @@ public class AVMLuceneIndexerImpl extends AbstractLuceneIndexerImpl impl
// Content is always tokenised
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, serializableValue);
- if (!index || contentData.getMimetype() == null)
+ if (!index || contentData == null || contentData.getMimetype() == null)
{
- // no mimetype or property not indexed
+ // no content, mimetype or property not indexed
continue;
}
// store mimetype in index - even if content does not index it is useful
diff --git a/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java b/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
index 4ce7245a2d..65308a8544 100644
--- a/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
+++ b/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
@@ -1224,7 +1224,7 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
}
}
- GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(contextNodeRef, null, childTypeQNames, filterProps, sortProps, pagingRequest);
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(contextNodeRef, null, null, childTypeQNames, filterProps, sortProps, pagingRequest);
// execute canned query
final CannedQueryResults results = cq.execute();
diff --git a/source/java/org/alfresco/repo/site/SiteServiceImpl.java b/source/java/org/alfresco/repo/site/SiteServiceImpl.java
index 9c90a5f377..d67b97e982 100644
--- a/source/java/org/alfresco/repo/site/SiteServiceImpl.java
+++ b/source/java/org/alfresco/repo/site/SiteServiceImpl.java
@@ -924,7 +924,7 @@ public class SiteServiceImpl extends AbstractLifecycleBean implements SiteServic
final String cQBeanName = "siteGetChildrenCannedQueryFactory";
GetChildrenCannedQueryFactory getChildrenCannedQueryFactory = (GetChildrenCannedQueryFactory)cannedQueryRegistry.getNamedObject(cQBeanName);
- GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(getSiteRoot(), null, searchTypeQNames,
+ GetChildrenCannedQuery cq = (GetChildrenCannedQuery)getChildrenCannedQueryFactory.getCannedQuery(getSiteRoot(), null, null, searchTypeQNames,
filterProps, sortProps, pagingRequest);
// execute canned query
diff --git a/source/java/org/alfresco/repo/template/BaseContentNode.java b/source/java/org/alfresco/repo/template/BaseContentNode.java
index baf660b8f4..21cd898b27 100644
--- a/source/java/org/alfresco/repo/template/BaseContentNode.java
+++ b/source/java/org/alfresco/repo/template/BaseContentNode.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -593,6 +593,10 @@ public abstract class BaseContentNode implements TemplateContent
// get the content reader
ContentService contentService = services.getContentService();
ContentReader reader = contentService.getReader(getNodeRef(), property);
+ if (reader == null)
+ {
+ return ""; // Caller of this method returns "" if there is an IOException
+ }
// get the writer and set it up for text convert
ContentWriter writer = contentService.getTempWriter();
diff --git a/source/java/org/alfresco/repo/tenant/MultiTAdminServiceImpl.java b/source/java/org/alfresco/repo/tenant/MultiTAdminServiceImpl.java
index e899aa69e6..d48fdb3fde 100644
--- a/source/java/org/alfresco/repo/tenant/MultiTAdminServiceImpl.java
+++ b/source/java/org/alfresco/repo/tenant/MultiTAdminServiceImpl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -44,7 +44,7 @@ import org.alfresco.repo.security.authentication.AuthenticationContext;
import org.alfresco.repo.security.authentication.AuthenticationException;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
-import org.alfresco.repo.site.SiteAVMBootstrap;
+import org.alfresco.repo.thumbnail.ThumbnailRegistry;
import org.alfresco.repo.usage.UserUsageTrackingComponent;
import org.alfresco.repo.workflow.WorkflowDeployer;
import org.alfresco.service.cmr.admin.RepoAdminService;
@@ -90,6 +90,7 @@ public class MultiTAdminServiceImpl implements TenantAdminService, ApplicationCo
private AttributeService attributeService;
private PasswordEncoder passwordEncoder;
private TenantRoutingFileContentStore tenantFileContentStore;
+ private ThumbnailRegistry thumbnailRegistry;
private WorkflowService workflowService;
private RepositoryExporterService repositoryExporterService;
private ModuleService moduleService;
@@ -197,6 +198,11 @@ public class MultiTAdminServiceImpl implements TenantAdminService, ApplicationCo
this.moduleService = moduleService;
}
+ public void setThumbnailRegistry(ThumbnailRegistry thumbnailRegistry)
+ {
+ this.thumbnailRegistry = thumbnailRegistry;
+ }
+
public void setBaseAdminUsername(String baseAdminUsername)
{
this.baseAdminUsername = baseAdminUsername;
@@ -365,6 +371,8 @@ public class MultiTAdminServiceImpl implements TenantAdminService, ApplicationCo
ImporterBootstrap spacesImporterBootstrap = (ImporterBootstrap)ctx.getBean("spacesBootstrap-mt");
bootstrapSpacesTenantStore(spacesImporterBootstrap, tenantDomain);
+ thumbnailRegistry.initThumbnailDefinitions();
+
// notify listeners that tenant has been created & hence enabled
for (TenantDeployer tenantDeployer : tenantDeployers)
{
@@ -433,6 +441,8 @@ public class MultiTAdminServiceImpl implements TenantAdminService, ApplicationCo
importBootstrapSpacesModelsTenantStore(tenantDomain, directorySource);
importBootstrapSpacesTenantStore(tenantDomain, directorySource);
+ thumbnailRegistry.initThumbnailDefinitions();
+
// notify listeners that tenant has been created & hence enabled
for (TenantDeployer tenantDeployer : tenantDeployers)
{
diff --git a/source/java/org/alfresco/repo/thumbnail/SimpleThumbnailer.java b/source/java/org/alfresco/repo/thumbnail/SimpleThumbnailer.java
index a8ac7b6967..96e5af4bf1 100644
--- a/source/java/org/alfresco/repo/thumbnail/SimpleThumbnailer.java
+++ b/source/java/org/alfresco/repo/thumbnail/SimpleThumbnailer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -156,48 +156,51 @@ public class SimpleThumbnailer extends TransactionListenerAdapter implements
}
Serializable value = this.nodeService.getProperty(nodeRef, ContentModel.PROP_CONTENT);
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
- List thumbnailDefinitions = this.thumbnailService.getThumbnailRegistry()
- .getThumbnailDefinitions(contentData.getMimetype(), contentData.getSize());
- for (final ThumbnailDefinition thumbnailDefinition : thumbnailDefinitions)
+ if (contentData != null)
{
- final NodeRef existingThumbnail = this.thumbnailService.getThumbnailByName(nodeRef,
- ContentModel.PROP_CONTENT, thumbnailDefinition.getName());
- try
+ List thumbnailDefinitions = this.thumbnailService.getThumbnailRegistry()
+ .getThumbnailDefinitions(contentData.getMimetype(), contentData.getSize());
+ for (final ThumbnailDefinition thumbnailDefinition : thumbnailDefinitions)
{
- // Generate each thumbnail in its own transaction, so that we can recover if one of them goes wrong
- this.transactionService.getRetryingTransactionHelper().doInTransaction(
- new RetryingTransactionCallback