diff --git a/config/alfresco/authentication-services-context.xml b/config/alfresco/authentication-services-context.xml index 18953cd5f0..98a1a96bc6 100644 --- a/config/alfresco/authentication-services-context.xml +++ b/config/alfresco/authentication-services-context.xml @@ -317,7 +317,7 @@ - + Consumer @@ -346,12 +346,12 @@ false - + All - + All @@ -374,12 +374,12 @@ false - + All - + All diff --git a/config/alfresco/bootstrap-context.xml b/config/alfresco/bootstrap-context.xml index 9977930c08..fa96fbb8a0 100644 --- a/config/alfresco/bootstrap-context.xml +++ b/config/alfresco/bootstrap-context.xml @@ -39,6 +39,12 @@ ${db.schema.update} + + ${db.schema.update.lockRetryCount} + + + ${db.schema.update.lockRetryWaitSeconds} + classpath:alfresco/dbscripts/create/2.2/${db.script.dialect}/AlfrescoPostCreate-2.2-MappedFKIndexes.sql diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml index f03fb5c945..8776440911 100644 --- a/config/alfresco/core-services-context.xml +++ b/config/alfresco/core-services-context.xml @@ -990,7 +990,7 @@ - + diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql index c1d1028424..f7869b0d8c 100644 --- a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql +++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql @@ -139,7 +139,7 @@ UPDATE alf_acl_member mem -- Remove duplicate aces the mysql way (as you can not use the deleted table in the where clause ...) 
CREATE TABLE tmp_to_delete SELECT ace.id FROM alf_acl_member mem RIGHT OUTER JOIN alf_access_control_entry ace ON mem.ace_id = ace.id WHERE mem.ace_id IS NULL; -DELETE FROM alf_access_control_entry ace USING alf_access_control_entry ace JOIN tmp_to_delete t ON ace.id = t.id; +DELETE FROM alf_access_control_entry USING alf_access_control_entry JOIN tmp_to_delete t ON alf_access_control_entry.id = t.id; DROP TABLE tmp_to_delete; -- Add constraint for duplicate acls diff --git a/config/alfresco/messages/content-service.properties b/config/alfresco/messages/content-service.properties index 4641308752..5104a7a94b 100644 --- a/config/alfresco/messages/content-service.properties +++ b/config/alfresco/messages/content-service.properties @@ -4,6 +4,9 @@ content.content_missing=The node''s content is missing: \n node: {0} \n read content.runtime_exec.property_moved=The property ''errorCodes'' has moved down onto the RuntimeExec class index.recovery.out_of_date=The indexes are not synchronized with the database. +index.tracking.starting=Index recovery started. +index.tracking.complete=Index recovery completed. +index.tracking.progress=\tProcessing transactions around {0}. index.recovery.starting=Index recovery started: {0} transactions. index.recovery.complete=Index recovery completed. index.recovery.progress=\t{0} % complete. diff --git a/config/alfresco/messages/schema-update.properties b/config/alfresco/messages/schema-update.properties index 6afd8b1618..3608dddda3 100644 --- a/config/alfresco/messages/schema-update.properties +++ b/config/alfresco/messages/schema-update.properties @@ -11,7 +11,7 @@ schema.update.msg.optional_statement_failed=Optional statement execution failed: schema.update.warn.dialect_unsupported=Alfresco should not be used with database dialect {0}. schema.update.warn.dialect_hsql=Alfresco is using the HSQL default database. Please only use this while evaluating Alfresco, it is NOT recommended for production or deployment! 
schema.update.err.found_multiple=\nMore than one Alfresco schema was found when querying the database metadata.\n Limit the database user's permissions or set the 'hibernate.default_schema' property in 'custom-hibernate-dialect.properties'. -schema.update.err.previous_failed=A previous schema upgrade failed. Revert to the original database before attempting the upgrade again. +schema.update.err.previous_failed=A previous schema upgrade failed or was not completed. Revert to the original database before attempting the upgrade again. schema.update.err.statement_failed=Statement execution failed:\n SQL: {0}\n Error: {1}\n File: {2}\n Line: {3} schema.update.err.update_failed=Schema auto-update failed schema.update.err.validation_failed=Schema validation failed diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index 71ed3bc488..a55fa22303 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -127,6 +127,8 @@ lucene.lock.poll.interval=100 # Database configuration db.schema.update=true +db.schema.update.lockRetryCount=24 +db.schema.update.lockRetryWaitSeconds=5 db.driver=org.gjt.mm.mysql.Driver db.name=alfresco db.url=jdbc:mysql:///${db.name} diff --git a/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java b/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java index 85453bdf20..28e64abc6c 100644 --- a/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java +++ b/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java @@ -2130,6 +2130,9 @@ public class ContentDiskDriver extends AlfrescoDiskDriver implements DiskInterfa if ( permissionService.hasPermission(nodeRef, PermissionService.WRITE) == AccessStatus.DENIED) throw new AccessDeniedException("No write access to " + name); + if ( permissionService.hasPermission(nodeRef, PermissionService.DELETE) == AccessStatus.DENIED) + throw new AccessDeniedException("No delete access to " + name); + // Check if the file is 
being marked for deletion, if so then check if the file is locked if ( info.hasSetFlag(FileInfo.SetDeleteOnClose) && info.hasDeleteOnClose()) diff --git a/source/java/org/alfresco/repo/admin/patch/PatchServiceImpl.java b/source/java/org/alfresco/repo/admin/patch/PatchServiceImpl.java index f7f9cad131..64c3e2a4f9 100644 --- a/source/java/org/alfresco/repo/admin/patch/PatchServiceImpl.java +++ b/source/java/org/alfresco/repo/admin/patch/PatchServiceImpl.java @@ -229,7 +229,7 @@ public class PatchServiceImpl implements PatchService // We bypass the patch if it was executed successfully if (appliedPatch != null) { - if (appliedPatch.getWasExecuted() && appliedPatch.getSucceeded()) + if (appliedPatch.getSucceeded()) { // It has already been successfully applied if (logger.isDebugEnabled()) @@ -239,23 +239,6 @@ public class PatchServiceImpl implements PatchService } return appliedPatch; } - else if (patch.getTargetSchema() != appliedPatch.getTargetSchema()) - { - // The target schema of the defined patch has changed. - // The patch applicability was changed for some reason, usually as a result of - // merges between branches. We need to detect new patches in clean installs. 
- if (appliedPatch.getAppliedToSchema() == appliedPatch.getTargetSchema()) - { - // The patch applicability changed, but it was originally not executed because - // it was a new patch in a clean install - if (logger.isDebugEnabled()) - { - logger.debug("Patch not applied to a previously clean install: \n" + - " patch: " + appliedPatch); - } - return appliedPatch; - } - } } // the execution report String report = null; diff --git a/source/java/org/alfresco/repo/audit/AuditableAspect.java b/source/java/org/alfresco/repo/audit/AuditableAspect.java index 792bd2e4b2..8a8ae02e6f 100644 --- a/source/java/org/alfresco/repo/audit/AuditableAspect.java +++ b/source/java/org/alfresco/repo/audit/AuditableAspect.java @@ -277,15 +277,14 @@ public class AuditableAspect */ public Boolean doWork() throws Exception { - // Set all the properties in one nodeService call to avoid multiple calls to onUpdateProperties - Map allProps = nodeService.getProperties(nodeRef); + // Callback here must be done on a per-property basis to avoid the cm:name updates to the child association - for (QName propertyQName : properties.keySet()) - { - Serializable property = properties.get(propertyQName); - allProps.put(propertyQName, property); + for (Map.Entry entry : properties.entrySet()) + { + QName propertyQName = entry.getKey(); + Serializable propertyValue = entry.getValue(); + nodeService.setProperty(nodeRef, propertyQName, propertyValue); } - nodeService.setProperties(nodeRef, allProps); return Boolean.TRUE; } diff --git a/source/java/org/alfresco/repo/avm/AVMInterpreter.java b/source/java/org/alfresco/repo/avm/AVMInterpreter.java index c9246d594b..6745f784b6 100644 --- a/source/java/org/alfresco/repo/avm/AVMInterpreter.java +++ b/source/java/org/alfresco/repo/avm/AVMInterpreter.java @@ -26,14 +26,19 @@ package org.alfresco.repo.avm; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.InputStreamReader; -import java.io.PrintStream; 
import java.io.InputStream; +import java.io.InputStreamReader; import java.io.OutputStream; +import java.io.PrintStream; +import java.io.Serializable; import java.util.ArrayList; +import java.util.Collection; import java.util.Date; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.regex.Pattern; import org.alfresco.repo.avm.util.BulkLoader; import org.alfresco.repo.domain.PropertyValue; @@ -45,6 +50,8 @@ import org.alfresco.service.cmr.avm.locking.AVMLock; import org.alfresco.service.cmr.avm.locking.AVMLockingService; import org.alfresco.service.cmr.avmsync.AVMDifference; import org.alfresco.service.cmr.avmsync.AVMSyncService; +import org.alfresco.service.cmr.dictionary.DataTypeDefinition; +import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName; import org.springframework.context.support.FileSystemXmlApplicationContext; @@ -56,6 +63,11 @@ import org.springframework.context.support.FileSystemXmlApplicationContext; */ public class AVMInterpreter { + private static final Pattern collectionPattern = Pattern.compile("^\\[(.*)\\]$"); + private static final Pattern nodeRefPattern = Pattern.compile("^\\w+://\\w+\\w+$"); + private static final Pattern integerPattern = Pattern.compile("^\\d+$"); + private static final Pattern dateTimePattern = Pattern.compile("^\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}$"); + /** * The service interface. */ @@ -169,263 +181,263 @@ public class AVMInterpreter } } - /** - * Interpret a single command using the BufferedReader passed in for any data needed. - * @param line The unparsed command - * @param in A Reader to be used for commands that need input data. - * @return The textual output of the command. 
- */ - public String interpretCommand(String line, BufferedReader in) - { - String[] command = line.split(",\\s+"); - if (command.length == 0) - { - command = new String[1]; - command[0] = line; - } - try - { - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - PrintStream out = new PrintStream(bout); - if (command[0].equals("ls")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), - command[1]); - if (desc == null) - { - return "Not Found."; - } - Map listing = - fService.getDirectoryListing(desc, true); - for (String name : listing.keySet()) - { - out.println(name + " " + listing.get(name)); - } - } - else if (command[0].equals("lsr")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), - command[1]); - recursiveList(out, desc, 0); - } - else if (command[0].equals("lsrep")) - { - List repos = fService.getStores(); - for (AVMStoreDescriptor repo : repos) - { - out.println(repo); - } - } - else if (command[0].equals("lsver")) - { - if (command.length != 2) - { - return "Syntax Error."; - } - List listing = fService.getStoreVersions(command[1]); - for (VersionDescriptor desc : listing) - { - out.println(desc); - } - } - else if (command[0].equals("mkrep")) - { - if (command.length != 2) - { - return "Syntax Error."; - } - fService.createStore(command[1]); - } - else if (command[0].equals("load")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - fLoader.recursiveLoad(command[1], command[2]); - } - else if (command[0].equals("mkdir")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - fService.createDirectory(command[1], command[2]); - } - else if (command[0].equals("mkbr")) - { - if (command.length != 5) - { - return "Syntax Error."; - } - fService.createBranch(Integer.parseInt(command[4]), command[1], command[2], command[3]); - } - else if 
(command[0].equals("mkldir")) - { - if (command.length != 4) - { - return "Syntax Error."; - } - fService.createLayeredDirectory(command[1], command[2], command[3]); - } - else if (command[0].equals("rename")) - { - if (command.length != 5) - { - return "Syntax Error."; - } - fService.rename(command[1], command[2], command[3], command[4]); - } - else if (command[0].equals("cp")) - { - if (command.length != 5) - { - return "Syntax Error."; - } - InputStream fin = fService.getFileInputStream(Integer.parseInt(command[2]), command[1]); - OutputStream fout = fService.createFile(command[3], command[4]); - byte [] buff = new byte[8192]; - int read; - while ((read = fin.read(buff)) != -1) - { - fout.write(buff, 0, read); - } - fin.close(); - fout.close(); - } - else if (command[0].equals("retarget")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - fService.retargetLayeredDirectory(command[1], command[2]); - } - else if (command[0].equals("mkprimary")) - { - if (command.length != 2) - { - return "Syntax Error."; - } - fService.makePrimary(command[1]); - } - else if (command[0].equals("mklfile")) - { - if (command.length != 4) - { - return "Syntax Error."; - } - fService.createLayeredFile(command[1], command[2], command[3]); - } - else if (command[0].equals("snap")) - { - if (command.length != 2) - { - return "Syntax Error."; - } - fService.createSnapshot(command[1], null, null); - } - else if (command[0].equals("cat")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - BufferedReader reader = - new BufferedReader( - new InputStreamReader(fService.getFileInputStream(Integer.parseInt(command[2]), - command[1]))); - String l; - while ((l = reader.readLine()) != null) - { - out.println(l); - } - reader.close(); - } - else if (command[0].equals("rm")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - fService.removeNode(command[1], command[2]); - } - else if (command[0].equals("rmrep")) - { - if (command.length != 2) - { - 
return "Syntax Error."; - } - fService.purgeStore(command[1]); - } - else if (command[0].equals("rmver")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - fService.purgeVersion(Integer.parseInt(command[2]), command[1]); - } - else if (command[0].equals("write")) - { - if (command.length != 2) - { - return "Syntax Error."; - } - PrintStream ps = - new PrintStream(fService.getFileOutputStream(command[1])); - String l; - while (!(l = in.readLine()).equals("")) - { - ps.println(l); - } - ps.close(); - } - else if (command[0].equals("create")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - PrintStream ps = - new PrintStream(fService.createFile(command[1], command[2])); - String l; - while (!(l = in.readLine()).equals("")) - { - ps.println(l); - } - ps.close(); - } - else if (command[0].equals("stat")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), command[1]); - out.println(desc); - out.println("Version: " + desc.getVersionID()); - out.println("Owner: " + desc.getOwner()); - out.println("Mod Time: " + new Date(desc.getModDate())); - } - else if (command[0].equals("getnodeproperties")) - { - if (command.length != 3) - { - return "Syntax Error."; - } - final Map properties = fService.getNodeProperties(Integer.parseInt(command[2]), command[1]); - for (final Map.Entry p : properties.entrySet()) - { - out.println(p.getKey() + ": " + p.getValue()); + /** + * Interpret a single command using the BufferedReader passed in for any data needed. + * @param line The unparsed command + * @param in A Reader to be used for commands that need input data. + * @return The textual output of the command. 
+ */ + public String interpretCommand(String line, BufferedReader in) + { + String[] command = line.split(",\\s+"); + if (command.length == 0) + { + command = new String[1]; + command[0] = line; + } + try + { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + PrintStream out = new PrintStream(bout); + if (command[0].equals("ls")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), + command[1]); + if (desc == null) + { + return "Not Found."; + } + Map listing = + fService.getDirectoryListing(desc, true); + for (String name : listing.keySet()) + { + out.println(name + " " + listing.get(name)); + } + } + else if (command[0].equals("lsr")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), + command[1]); + recursiveList(out, desc, 0); + } + else if (command[0].equals("lsrep")) + { + List repos = fService.getStores(); + for (AVMStoreDescriptor repo : repos) + { + out.println(repo); + } + } + else if (command[0].equals("lsver")) + { + if (command.length != 2) + { + return "Syntax Error."; + } + List listing = fService.getStoreVersions(command[1]); + for (VersionDescriptor desc : listing) + { + out.println(desc); + } + } + else if (command[0].equals("mkrep")) + { + if (command.length != 2) + { + return "Syntax Error."; + } + fService.createStore(command[1]); + } + else if (command[0].equals("load")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + fLoader.recursiveLoad(command[1], command[2]); + } + else if (command[0].equals("mkdir")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + fService.createDirectory(command[1], command[2]); + } + else if (command[0].equals("mkbr")) + { + if (command.length != 5) + { + return "Syntax Error."; + } + fService.createBranch(Integer.parseInt(command[4]), command[1], command[2], command[3]); + } + else if 
(command[0].equals("mkldir")) + { + if (command.length != 4) + { + return "Syntax Error."; + } + fService.createLayeredDirectory(command[1], command[2], command[3]); + } + else if (command[0].equals("rename")) + { + if (command.length != 5) + { + return "Syntax Error."; + } + fService.rename(command[1], command[2], command[3], command[4]); + } + else if (command[0].equals("cp")) + { + if (command.length != 5) + { + return "Syntax Error."; + } + InputStream fin = fService.getFileInputStream(Integer.parseInt(command[2]), command[1]); + OutputStream fout = fService.createFile(command[3], command[4]); + byte [] buff = new byte[8192]; + int read; + while ((read = fin.read(buff)) != -1) + { + fout.write(buff, 0, read); + } + fin.close(); + fout.close(); + } + else if (command[0].equals("retarget")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + fService.retargetLayeredDirectory(command[1], command[2]); + } + else if (command[0].equals("mkprimary")) + { + if (command.length != 2) + { + return "Syntax Error."; + } + fService.makePrimary(command[1]); + } + else if (command[0].equals("mklfile")) + { + if (command.length != 4) + { + return "Syntax Error."; + } + fService.createLayeredFile(command[1], command[2], command[3]); + } + else if (command[0].equals("snap")) + { + if (command.length != 2) + { + return "Syntax Error."; + } + fService.createSnapshot(command[1], null, null); + } + else if (command[0].equals("cat")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + BufferedReader reader = + new BufferedReader( + new InputStreamReader(fService.getFileInputStream(Integer.parseInt(command[2]), + command[1]))); + String l; + while ((l = reader.readLine()) != null) + { + out.println(l); + } + reader.close(); + } + else if (command[0].equals("rm")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + fService.removeNode(command[1], command[2]); + } + else if (command[0].equals("rmrep")) + { + if (command.length != 2) + { + 
return "Syntax Error."; + } + fService.purgeStore(command[1]); + } + else if (command[0].equals("rmver")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + fService.purgeVersion(Integer.parseInt(command[2]), command[1]); + } + else if (command[0].equals("write")) + { + if (command.length != 2) + { + return "Syntax Error."; + } + PrintStream ps = + new PrintStream(fService.getFileOutputStream(command[1])); + String l; + while (!(l = in.readLine()).equals("")) + { + ps.println(l); + } + ps.close(); + } + else if (command[0].equals("create")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + PrintStream ps = + new PrintStream(fService.createFile(command[1], command[2])); + String l; + while (!(l = in.readLine()).equals("")) + { + ps.println(l); + } + ps.close(); + } + else if (command[0].equals("stat")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + AVMNodeDescriptor desc = fService.lookup(Integer.parseInt(command[2]), command[1]); + out.println(desc); + out.println("Version: " + desc.getVersionID()); + out.println("Owner: " + desc.getOwner()); + out.println("Mod Time: " + new Date(desc.getModDate())); + } + else if (command[0].equals("getnodeproperties")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + final Map properties = fService.getNodeProperties(Integer.parseInt(command[2]), command[1]); + for (final Map.Entry p : properties.entrySet()) + { + out.println(p.getKey() + ": " + p.getValue()); } } else if (command[0].equals("descnode")) @@ -602,6 +614,73 @@ public class AVMInterpreter return "Syntax Error."; } fSyncService.flatten(command[1], command[2]); + } + else if (command[0].equals("getnodeaspects")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + + final Set aspects = fService.getAspects(Integer.parseInt(command[2]), command[1]); + for (final QName qn : aspects) + { + out.println(qn.toString()); + } + } + else if (command[0].equals("addnodeaspect")) + { + if 
(command.length != 3) + { + return "Syntax Error."; + } + + fService.addAspect(command[1], QName.createQName(command[2])); + } + else if (command[0].equals("deletenodeaspect")) + { + if (command.length != 3) + { + return "Syntax Error."; + } + + fService.removeAspect(command[1], QName.createQName(command[2])); + } + else if (command[0].equals("setnodeproperty")) + { + if (command.length < 4) + { + return "Syntax Error."; + } + + QName valueQName = QName.createQName(command[2]); + + String propertyValue = ""; + + // If multiple values are specified then concatenate the values + if (command.length > 4) + { + StringBuffer sb = new StringBuffer(); + for (int i=3; i propValues = new HashSet(elements.length); + for (int i=0; i callback, Serializable key, Object expectedValue) throws Throwable { TransactionService transactionService = serviceRegistry.getTransactionService(); UserTransaction txn = transactionService.getUserTransaction(); @@ -368,7 +406,7 @@ public class CacheTest extends TestCase */ public void testConcurrentAddAgainstAdd()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -389,7 +427,7 @@ public class CacheTest extends TestCase public void testConcurrentAddAgainstAddSame()throws Throwable { final Object commonValue = "AAA"; - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -409,7 +447,7 @@ public class CacheTest extends TestCase */ public void testConcurrentAddAgainstClear()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -430,7 +468,7 @@ public class CacheTest extends TestCase */ 
public void testConcurrentUpdateAgainstUpdate()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -452,7 +490,7 @@ public class CacheTest extends TestCase */ public void testConcurrentUpdateAgainstUpdateNull()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -474,7 +512,7 @@ public class CacheTest extends TestCase */ public void testConcurrentUpdateNullAgainstUpdate()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -496,7 +534,7 @@ public class CacheTest extends TestCase */ public void testConcurrentUpdateAgainstRemove()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -518,7 +556,7 @@ public class CacheTest extends TestCase */ public void testConcurrentUpdateAgainstClear()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -540,7 +578,7 @@ public class CacheTest extends TestCase */ public void testConcurrentRemoveAgainstUpdate()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -562,7 +600,7 @@ public class CacheTest extends TestCase */ public void 
testConcurrentRemoveAgainstRemove()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { @@ -584,7 +622,7 @@ public class CacheTest extends TestCase */ public void testConcurrentRemoveAgainstClear()throws Throwable { - RetryingTransactionCallback callback = new RetryingTransactionCallback() + RetryingTransactionCallback callback = new RetryingTransactionCallback() { public Object execute() throws Throwable { diff --git a/source/java/org/alfresco/repo/cache/TransactionalCache.java b/source/java/org/alfresco/repo/cache/TransactionalCache.java index 5fa0fd03f9..6306066975 100644 --- a/source/java/org/alfresco/repo/cache/TransactionalCache.java +++ b/source/java/org/alfresco/repo/cache/TransactionalCache.java @@ -565,22 +565,14 @@ public class TransactionalCache } /** - * NO-OP + * Merge the transactional caches into the shared cache */ @SuppressWarnings("unchecked") public void beforeCommit(boolean readOnly) - { - } - - /** - * Merge the transactional caches into the shared cache - */ - @SuppressWarnings("unchecked") - public void afterCommit() { if (isDebugEnabled) { - logger.debug("Processing post-commit"); + logger.debug("Processing before-commit"); } TransactionData txnData = getTransactionData(); @@ -634,6 +626,14 @@ public class TransactionalCache } } + /** + * NO-OP + */ + @SuppressWarnings("unchecked") + public void afterCommit() + { + } + /** * Just allow the transactional caches to be thrown away */ diff --git a/source/java/org/alfresco/repo/domain/PropertyValue.java b/source/java/org/alfresco/repo/domain/PropertyValue.java index fe35bdbe2a..aa828c1392 100644 --- a/source/java/org/alfresco/repo/domain/PropertyValue.java +++ b/source/java/org/alfresco/repo/domain/PropertyValue.java @@ -259,6 +259,18 @@ public class PropertyValue implements Cloneable, Serializable @Override protected ValueType 
getPersistedType(Serializable value) { + if (value instanceof MLText) + { + MLText mlText = (MLText) value; + if (mlText.getDefaultValue() == null) + { + return ValueType.NULL; + } + else if (mlText.size() == 1) + { + return ValueType.STRING; + } + } return ValueType.DB_ATTRIBUTE; } diff --git a/source/java/org/alfresco/repo/domain/PropertyValueTest.java b/source/java/org/alfresco/repo/domain/PropertyValueTest.java index a526f23f87..1ed400039b 100644 --- a/source/java/org/alfresco/repo/domain/PropertyValueTest.java +++ b/source/java/org/alfresco/repo/domain/PropertyValueTest.java @@ -45,8 +45,15 @@ public class PropertyValueTest extends TestCase public void testMLText() { + // single language MLText mlText = new MLText(Locale.FRENCH, "bonjour"); PropertyValue propertyValue = new PropertyValue(DataTypeDefinition.MLTEXT, mlText); + assertNotNull("MLText not persisted as a string", propertyValue.getStringValue()); + + // multiple languages + mlText = new MLText(Locale.GERMAN, "hallo"); + mlText.addValue(Locale.ITALIAN, "ciao"); + propertyValue = new PropertyValue(DataTypeDefinition.MLTEXT, mlText); assertNotNull("MLText not persisted as an attribute", propertyValue.getAttributeValue()); } } diff --git a/source/java/org/alfresco/repo/domain/hibernate/AbstractPermissionsDaoComponentImpl.java b/source/java/org/alfresco/repo/domain/hibernate/AbstractPermissionsDaoComponentImpl.java index d547b1a8ba..46a0ffe791 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/AbstractPermissionsDaoComponentImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/AbstractPermissionsDaoComponentImpl.java @@ -245,7 +245,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions if (acl == null) { // there isn't an access control list for the node - spoof a null one - SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(nodeRef, true, Collections. 
emptySet()); + SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(nodeRef, true, Collections. emptyList()); npe = snpe; } else @@ -280,20 +280,28 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions if (acl == null) { // there isn't an access control list for the node - spoof a null one - SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(nodeRef, true, Collections. emptySet()); + SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(nodeRef, true, Collections. emptyList()); return snpe; } else { AccessControlList info = aclDaoComponent.getAccessControlList(acl.getId()); - HashSet spes = new HashSet(info.getEntries().size(), 1.0f); + SimpleNodePermissionEntry cached = info.getCachedSimpleNodePermissionEntry(); + if(cached != null) + { + return cached; + } + + ArrayList spes = new ArrayList(info.getEntries().size()); for (AccessControlEntry entry : info.getEntries()) { - SimplePermissionEntry spe = new SimplePermissionEntry(nodeRef, entry.getPermission(), entry.getAuthority(), entry.getAccessStatus()); + SimplePermissionEntry spe = new SimplePermissionEntry(nodeRef, entry.getPermission(), entry.getAuthority(), entry.getAccessStatus(), entry.getPosition()); spes.add(spe); } SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(nodeRef, acl.getInherits(), spes); + + info.setCachedSimpleNodePermissionEntry(snpe); return snpe; } } @@ -304,17 +312,17 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions if (acl == null) { // there isn't an access control list for the node - spoof a null one - SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(null, true, Collections. emptySet()); + SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(null, true, Collections. 
emptyList()); return snpe; } else { AccessControlList info = aclDaoComponent.getAccessControlList(acl.getId()); - HashSet spes = new HashSet(info.getEntries().size(), 1.0f); + ArrayList spes = new ArrayList(info.getEntries().size()); for (AccessControlEntry entry : info.getEntries()) { - SimplePermissionEntry spe = new SimplePermissionEntry(null, entry.getPermission(), entry.getAuthority(), entry.getAccessStatus()); + SimplePermissionEntry spe = new SimplePermissionEntry(null, entry.getPermission(), entry.getAuthority(), entry.getAccessStatus(), entry.getPosition()); spes.add(spe); } SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(null, acl.getInherits(), spes); @@ -376,6 +384,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions CreationReport report = getMutableAccessControlList(nodeRef); SimpleAccessControlEntry pattern = new SimpleAccessControlEntry(); pattern.setAuthority(authority); + pattern.setPosition(Integer.valueOf(0)); List changes = aclDaoComponent.deleteAccessControlEntries(report.getCreated().getId(), pattern); getACLDAO(nodeRef).updateChangedAcls(nodeRef, changes); break; @@ -412,6 +421,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions SimpleAccessControlEntry pattern = new SimpleAccessControlEntry(); pattern.setAuthority(authority); pattern.setPermission(permission); + pattern.setPosition(Integer.valueOf(0)); List changes = aclDaoComponent.deleteAccessControlEntries(report.getCreated().getId(), pattern); getACLDAO(nodeRef).updateChangedAcls(nodeRef, changes); break; @@ -437,6 +447,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions entry.setPermission(permission); entry.setAccessStatus(allow ? 
AccessStatus.ALLOWED : AccessStatus.DENIED); entry.setAceType(ACEType.ALL); + entry.setPosition(Integer.valueOf(0)); List changes = aclDaoComponent.setAccessControlEntry(report.getCreated().getId(), entry); List all = new ArrayList(changes.size() + report.getChanges().size()); all.addAll(report.getChanges()); @@ -474,6 +485,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions entry.setPermission(pe.getPermissionReference()); entry.setAccessStatus(pe.isAllowed() ? AccessStatus.ALLOWED : AccessStatus.DENIED); entry.setAceType(ACEType.ALL); + entry.setPosition(Integer.valueOf(0)); List changes = aclDaoComponent.setAccessControlEntry(report.getCreated().getId(), entry); List all = new ArrayList(changes.size() + report.getChanges().size()); all.addAll(report.getChanges()); @@ -525,6 +537,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions SimpleAccessControlEntry pattern = new SimpleAccessControlEntry(); pattern.setAuthority(authority); pattern.setPermission(permission); + pattern.setPosition(Integer.valueOf(0)); aclDaoComponent.deleteAccessControlEntries(acl.getId(), pattern); } @@ -570,6 +583,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions acl = getMutableAccessControlList(storeRef); SimpleAccessControlEntry pattern = new SimpleAccessControlEntry(); pattern.setAuthority(authority); + pattern.setPosition(Integer.valueOf(0)); aclDaoComponent.deleteAccessControlEntries(acl.getId(), pattern); } @@ -587,6 +601,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions entry.setPermission(permission); entry.setAccessStatus(allow ? 
AccessStatus.ALLOWED : AccessStatus.DENIED); entry.setAceType(ACEType.ALL); + entry.setPosition(Integer.valueOf(0)); aclDaoComponent.setAccessControlEntry(acl.getId(), entry); } @@ -612,7 +627,7 @@ public abstract class AbstractPermissionsDaoComponentImpl implements Permissions if (acl == null) { // there isn't an access control list for the node - spoof a null one - SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(null, true, Collections. emptySet()); + SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(null, true, Collections. emptyList()); npe = snpe; } else diff --git a/source/java/org/alfresco/repo/domain/hibernate/AclDaoComponentImpl.java b/source/java/org/alfresco/repo/domain/hibernate/AclDaoComponentImpl.java index 0631c64777..fd1a2ecb40 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/AclDaoComponentImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/AclDaoComponentImpl.java @@ -30,6 +30,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.zip.CRC32; @@ -45,6 +46,7 @@ import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.QNameDAO; import org.alfresco.repo.domain.QNameEntity; import org.alfresco.repo.node.db.hibernate.HibernateNodeDaoServiceImpl; +import org.alfresco.repo.security.permissions.ACEType; import org.alfresco.repo.security.permissions.ACLCopyMode; import org.alfresco.repo.security.permissions.ACLType; import org.alfresco.repo.security.permissions.AccessControlEntry; @@ -63,6 +65,7 @@ import org.alfresco.service.namespace.QName; import org.alfresco.util.GUID; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.hibernate.CacheMode; import org.hibernate.Criteria; import org.hibernate.Query; import org.hibernate.Session; @@ -93,6 +96,8 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo 
static String QUERY_GET_ACES_FOR_ACL = "permission.GetAcesForAcl"; + static String QUERY_LOAD_ACL = "permission.LoadAcl"; + static String QUERY_GET_ACLS_THAT_INHERIT_FROM_THIS_ACL = "permission.GetAclsThatInheritFromThisAcl"; static String QUERY_GET_AVM_NODES_BY_ACL = "permission.FindAvmNodesByACL"; @@ -102,7 +107,7 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo static String QUERY_GET_LAYERED_DIRECTORIES = "permission.GetLayeredDirectories"; static String QUERY_GET_LAYERED_FILES = "permission.GetLayeredFiles"; - + static String QUERY_GET_NEW_IN_STORE = "permission.GetNewInStore"; /** Access to QName entities */ @@ -573,24 +578,71 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo * @param depth */ @SuppressWarnings("unchecked") - private void removeAcesFromAcl(final Long id, AccessControlEntry exclude, int depth) + private void removeAcesFromAcl(final Long id, final AccessControlEntry exclude, final int depth) { AcePatternMatcher excluder = new AcePatternMatcher(exclude); HibernateCallback callback = new HibernateCallback() { public Object doInHibernate(Session session) { - Query query = session.getNamedQuery(QUERY_GET_ACES_FOR_ACL); - query.setParameter("id", id); - return query.list(); + if (exclude == null) + { + Criteria criteria = session.createCriteria(DbAccessControlListMemberImpl.class, "member"); + criteria.createAlias("accessControlList", "acl"); + criteria.add(Restrictions.eq("acl.id", id)); + criteria.createAlias("accessControlEntry", "ace"); + criteria.createAlias("ace.authority", "authority"); + criteria.createAlias("ace.permission", "permission"); + criteria.setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP); + return criteria.list(); + } + else + { + Criteria criteria = session.createCriteria(DbAccessControlListMemberImpl.class, "member"); + criteria.createAlias("accessControlList", "acl"); + criteria.add(Restrictions.eq("acl.id", id)); + if ((exclude.getPosition() != null) && 
exclude.getPosition() >= 0) + { + criteria.add(Restrictions.eq("position", Integer.valueOf(depth))); + } + if ((exclude.getAccessStatus() != null) || (exclude.getAceType() != null) || (exclude.getAuthority() != null) || (exclude.getPermission() != null)) + { + criteria.createAlias("accessControlEntry", "ace"); + if (exclude.getAccessStatus() != null) + { + criteria.add(Restrictions.eq("ace.allowed", exclude.getAccessStatus() == AccessStatus.ALLOWED ? Boolean.TRUE : Boolean.FALSE)); + } + if (exclude.getAceType() != null) + { + criteria.add(Restrictions.eq("ace.applies", Integer.valueOf(exclude.getAceType().getId()))); + } + if (exclude.getAuthority() != null) + { + criteria.createAlias("ace.authority", "authority"); + criteria.add(Restrictions.eq("authority.authority", exclude.getAuthority())); + } + if (exclude.getPermission() != null) + { + criteria.createAlias("ace.permission", "permission"); + criteria.add(Restrictions.eq("permission.name", exclude.getPermission().getName())); + // TODO: Add typeQname + } + } + + criteria.setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP); + return criteria.list(); + + } } }; - List members = (List) getHibernateTemplate().execute(callback); + + List> results = (List>) getHibernateTemplate().execute(callback); boolean removed = false; - for (DbAccessControlListMember member : members) + for (Map result : results) { - if ((exclude != null) && excluder.matches(member.getAccessControlEntry(), depth, member.getPosition())) + DbAccessControlListMember member = (DbAccessControlListMember) result.get("member"); + if ((exclude != null) && excluder.matches(result, depth)) { getHibernateTemplate().delete(member); removed = true; @@ -1066,34 +1118,42 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo { public Object doInHibernate(Session session) { - Query query = session.getNamedQuery(QUERY_GET_ACES_FOR_ACL); + Query query = session.getNamedQuery(QUERY_LOAD_ACL); query.setParameter("id", id); + 
query.setCacheMode(CacheMode.IGNORE); return query.list(); } }; - List members = (List) getHibernateTemplate().execute(callback); + List results = (List) getHibernateTemplate().execute(callback); - List entries = new ArrayList(); - for (DbAccessControlListMember member : members) + List entries = new ArrayList(results.size()); + for (Object[] result : results) + // for (DbAccessControlListMember member : members) { - SimpleAccessControlEntry entry = new SimpleAccessControlEntry(); - entry.setAccessStatus(member.getAccessControlEntry().isAllowed() ? AccessStatus.ALLOWED : AccessStatus.DENIED); - entry.setAceType(member.getAccessControlEntry().getAceType()); - entry.setAuthority(member.getAccessControlEntry().getAuthority().getAuthority()); - if (member.getAccessControlEntry().getContext() != null) - { - SimpleAccessControlEntryContext context = new SimpleAccessControlEntryContext(); - context.setClassContext(member.getAccessControlEntry().getContext().getClassContext()); - context.setKVPContext(member.getAccessControlEntry().getContext().getKvpContext()); - context.setPropertyContext(member.getAccessControlEntry().getContext().getPropertyContext()); - entry.setContext(context); - } - DbPermission perm = member.getAccessControlEntry().getPermission(); + Boolean aceIsAllowed = (Boolean)result[0]; + Integer aceType = (Integer)result[1]; + String authority = (String)result[2]; + Long permissionId = (Long)result[3]; + Integer position = (Integer)result[4]; + + SimpleAccessControlEntry sacEntry = new SimpleAccessControlEntry(); + sacEntry.setAccessStatus(aceIsAllowed ? 
AccessStatus.ALLOWED : AccessStatus.DENIED); + sacEntry.setAceType(ACEType.getACETypeFromId(aceType)); + sacEntry.setAuthority(authority); + // if (entry.getContext() != null) + // { + // SimpleAccessControlEntryContext context = new SimpleAccessControlEntryContext(); + // context.setClassContext(entry.getContext().getClassContext()); + // context.setKVPContext(entry.getContext().getKvpContext()); + // context.setPropertyContext(entry.getContext().getPropertyContext()); + // sacEntry.setContext(context); + // } + DbPermission perm = (DbPermission)getSession().get(DbPermissionImpl.class, permissionId); SimplePermissionReference permissionRefernce = SimplePermissionReference.getPermissionReference(perm.getTypeQName().getQName(), perm.getName()); - entry.setPermission(permissionRefernce); - entry.setPosition(member.getPosition()); + sacEntry.setPermission(permissionRefernce); + sacEntry.setPosition(position); - entries.add(entry); + entries.add(sacEntry); } @@ -1714,13 +1774,16 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo this.pattern = pattern; } - boolean matches(DbAccessControlEntry entry, int position, int memberPosition) + boolean matches(Map result, int position) { if (pattern == null) { return true; } + DbAccessControlListMember member = (DbAccessControlListMember) result.get("member"); + DbAccessControlEntry entry = (DbAccessControlEntry) result.get("ace"); + if (pattern.getAccessStatus() != null) { if (pattern.getAccessStatus() != (entry.isAllowed() ? 
AccessStatus.ALLOWED : AccessStatus.DENIED)) @@ -1739,7 +1802,8 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo if (pattern.getAuthority() != null) { - if (!pattern.getAuthority().equals(entry.getAuthority().getAuthority())) + DbAuthority authority = (DbAuthority) result.get("authority"); + if (!pattern.getAuthority().equals(authority.getAuthority())) { return false; } @@ -1752,13 +1816,14 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo if (pattern.getPermission() != null) { + DbPermission permission = (DbPermission) result.get("permission"); final QName patternQName = pattern.getPermission().getQName(); - if ((patternQName != null) && (!patternQName.equals(entry.getPermission().getTypeQName().getQName()))) + if ((patternQName != null) && (!patternQName.equals(permission.getTypeQName().getQName()))) { return false; } final String patternName = pattern.getPermission().getName(); - if ((patternName != null) && (!patternName.equals(entry.getPermission().getName()))) + if ((patternName != null) && (!patternName.equals(permission.getName()))) { return false; } @@ -1768,14 +1833,14 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo { if (pattern.getPosition().intValue() >= 0) { - if (memberPosition != position) + if (member.getPosition() != position) { return false; } } else if (pattern.getPosition().intValue() == -1) { - if (memberPosition <= position) + if (member.getPosition() <= position) { return false; } @@ -2000,7 +2065,7 @@ public class AclDaoComponentImpl extends HibernateDaoSupport implements AclDaoCo throw new AlfrescoRuntimeException("Failed to set TX isolation level", e); } } - + /** * How many nodes are noew in store (approximate) * @return - the number fo new nodes - approximate diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml index 661bd60506..4311d58add 100644 --- 
a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml @@ -58,8 +58,8 @@ class="org.alfresco.repo.domain.hibernate.DbAccessControlListImpl" column="acl_id" foreign-key="fk_alf_n_acl" - lazy="proxy" - fetch="select" + lazy="false" + fetch="join" unique="false" not-null="false" cascade="none" /> @@ -68,7 +68,8 @@ name="properties" table="alf_node_properties" lazy="true" - fetch="select" + fetch="select" + batch-size="128" sort="unsorted" inverse="false" optimistic-lock="true" @@ -92,8 +93,9 @@ + + select + ace.allowed, ace.applies, authority.authority, ace.permission.id, aclmem.position + from + org.alfresco.repo.domain.hibernate.DbAccessControlListMemberImpl as aclmem + join aclmem.accessControlList as acl + join aclmem.accessControlEntry as ace + join ace.authority as authority + where + acl.id = :id + + select acl.id diff --git a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java index e1c9a0b3a9..52563c123a 100644 --- a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java +++ b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java @@ -111,6 +111,9 @@ public class SchemaBootstrap extends AbstractLifecycleBean private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found"; private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator"; + public static final int DEFAULT_LOCK_RETRY_COUNT = 24; + public static final int DEFAULT_LOCK_RETRY_WAIT_SECONDS = 5; + public static final int DEFAULT_MAX_STRING_LENGTH = 1024; private static volatile int maxStringLength = DEFAULT_MAX_STRING_LENGTH; @@ -147,6 +150,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean private List validateUpdateScriptPatches; private List preUpdateScriptPatches; private List postUpdateScriptPatches; + private int schemaUpdateLockRetryCount = 
DEFAULT_LOCK_RETRY_COUNT; + private int schemaUpdateLockRetryWaitSeconds = DEFAULT_LOCK_RETRY_WAIT_SECONDS; private int maximumStringLength; private ThreadLocal executedStatementsThreadLocal = new ThreadLocal(); @@ -237,6 +242,27 @@ public class SchemaBootstrap extends AbstractLifecycleBean this.postUpdateScriptPatches = scriptPatches; } + /** + * Set the number times that the DB must be checked for the presence of the table + * indicating that a schema change is in progress. + * + * @param schemaUpdateLockRetryCount the number of times to retry (default 24) + */ + public void setSchemaUpdateLockRetryCount(int schemaUpdateLockRetryCount) + { + this.schemaUpdateLockRetryCount = schemaUpdateLockRetryCount; + } + + /** + * Set the wait time (seconds) between checks for the schema update lock. + * + * @param schemaUpdateLockRetryWaitSeconds the number of seconds between checks (default 5 seconds) + */ + public void setSchemaUpdateLockRetryWaitSeconds(int schemaUpdateLockRetryWaitSeconds) + { + this.schemaUpdateLockRetryWaitSeconds = schemaUpdateLockRetryWaitSeconds; + } + /** * Optionally override the system's default maximum string length. Some databases have * limitations on how long the string_value columns can be while other do not. 
@@ -480,34 +506,34 @@ public class SchemaBootstrap extends AbstractLifecycleBean } } + private static class LockFailedException extends Exception + { + private static final long serialVersionUID = -6676398230191205456L; + } + + /** * Records that the bootstrap process has started */ private synchronized void setBootstrapStarted(Connection connection) throws Exception { - // We wait a for a minute to give other instances starting against the same database a - // chance to get through this process - for (int i = 0; i < 12; i++) + // Create the marker table + Statement stmt = connection.createStatement(); + try { - // Create the marker table - Statement stmt = connection.createStatement(); - try - { - stmt.executeUpdate("create table alf_bootstrap_lock (charval CHAR(1) NOT NULL)"); - // Success - return; - } - catch (Throwable e) - { - // Table exists - wait a bit - try { this.wait(5000L); } catch (InterruptedException ee) {} - } - finally - { - try { stmt.close(); } catch (Throwable e) {} - } + stmt.executeUpdate("create table alf_bootstrap_lock (charval CHAR(1) NOT NULL)"); + // Success + return; + } + catch (Throwable e) + { + // We throw a well-known exception to be handled by retrying code if required + throw new LockFailedException(); + } + finally + { + try { stmt.close(); } catch (Throwable e) {} } - throw AlfrescoRuntimeException.create(ERR_PREVIOUS_FAILED_BOOTSTRAP); } /** @@ -727,6 +753,15 @@ public class SchemaBootstrap extends AbstractLifecycleBean File scriptFile, String scriptUrl) throws Exception { + StringBuilder executedStatements = executedStatementsThreadLocal.get(); + if (executedStatements == null) + { + // There is no lock at this stage. This process can fall out if the lock can't be applied. 
+ setBootstrapStarted(connection); + executedStatements = new StringBuilder(8094); + executedStatementsThreadLocal.set(executedStatements); + } + if (scriptUrl == null) { LogUtil.info(logger, MSG_EXECUTING_GENERATED_SCRIPT, scriptFile); @@ -818,6 +853,12 @@ public class SchemaBootstrap extends AbstractLifecycleBean */ private void executeStatement(Connection connection, String sql, boolean optional, int line, File file) throws Exception { + StringBuilder executedStatements = executedStatementsThreadLocal.get(); + if (executedStatements == null) + { + throw new IllegalArgumentException("The executedStatementsThreadLocal must be populated"); + } + Statement stmt = connection.createStatement(); try { @@ -826,12 +867,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean LogUtil.debug(logger, MSG_EXECUTING_STATEMENT, sql); } stmt.execute(sql); - // Write the statement to the file, if necessary - StringBuilder executedStatements = executedStatementsThreadLocal.get(); - if (executedStatements != null) - { - executedStatements.append(sql).append(";\n"); - } + // Record the statement + executedStatements.append(sql).append(";\n"); } catch (SQLException e) { @@ -923,13 +960,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean } @Override - protected void onBootstrap(ApplicationEvent event) + protected synchronized void onBootstrap(ApplicationEvent event) { -// System.out.println("\n" + -// "=============================================================================\n" + -// "= WARNING: USE OF THIS BUILD IS LIKELY TO BREAK CURRENT OR FUTURE UPGRADES. 
=\n" + -// "============================================================================="); -// // do everything in a transaction Session session = getSessionFactory().openSession(); try @@ -949,14 +981,33 @@ public class SchemaBootstrap extends AbstractLifecycleBean cfg.setProperty(Environment.CONNECTION_PROVIDER, SchemaBootstrapConnectionProvider.class.getName()); SchemaBootstrapConnectionProvider.setBootstrapConnection(connection); - // update the schema, if required + // Update the schema, if required. if (updateSchema) { - // Check and record that the bootstrap has started - setBootstrapStarted(connection); + // Retries are required here as the DB lock will be applied lazily upon first statement execution. + // So if the schema is up to date (no statements executed) then the LockFailException cannot be + // thrown. If it is thrown, the the update needs to be rerun as it will probably generate no SQL + // statements the second time around. + boolean updatedSchema = false; + for (int i = 0; i < schemaUpdateLockRetryCount; i++) + { + try + { + updateSchema(cfg, session, connection); + updatedSchema = true; + break; + } + catch (LockFailedException e) + { + try { this.wait(schemaUpdateLockRetryWaitSeconds * 1000L); } catch (InterruptedException ee) {} + } + } - // Allocate buffer for executed statements - executedStatementsThreadLocal.set(new StringBuilder(1024)); + if (!updatedSchema) + { + // The retries were exceeded + throw new AlfrescoRuntimeException(ERR_PREVIOUS_FAILED_BOOTSTRAP); + } boolean create = updateSchema(cfg, session, connection); @@ -970,8 +1021,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean { schemaOutputFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate-All_Statements-", ".sql"); } - String executedStatements = executedStatementsThreadLocal.get().toString(); - if (executedStatements.length() == 0) + StringBuilder executedStatements = executedStatementsThreadLocal.get(); + if (executedStatements == null) { 
LogUtil.info(logger, MSG_NO_CHANGES); } @@ -979,7 +1030,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean { FileContentWriter writer = new FileContentWriter(schemaOutputFile); writer.setEncoding("UTF-8"); - writer.putContent(executedStatements); + String executedStatementsStr = executedStatements.toString(); + writer.putContent(executedStatementsStr); LogUtil.info(logger, MSG_ALL_STATEMENTS, schemaOutputFile.getPath()); } @@ -991,8 +1043,11 @@ public class SchemaBootstrap extends AbstractLifecycleBean checkSchemaPatchScripts(cfg, session, connection, postUpdateScriptPatches, false); // check scripts } - // Remove the flag indicating a running bootstrap - setBootstrapCompleted(connection); + if (executedStatements != null) + { + // Remove the flag indicating a running bootstrap + setBootstrapCompleted(connection); + } } else { diff --git a/source/java/org/alfresco/repo/importer/ImporterBootstrap.java b/source/java/org/alfresco/repo/importer/ImporterBootstrap.java index 2d0d074185..f872885d1d 100644 --- a/source/java/org/alfresco/repo/importer/ImporterBootstrap.java +++ b/source/java/org/alfresco/repo/importer/ImporterBootstrap.java @@ -39,13 +39,12 @@ import java.util.Locale; import java.util.Properties; import java.util.ResourceBundle; -import javax.transaction.UserTransaction; - import net.sf.acegisecurity.Authentication; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.i18n.I18NUtil; import org.alfresco.repo.security.authentication.AuthenticationComponent; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.StoreRef; import org.alfresco.service.cmr.view.ImporterBinding; @@ -58,6 +57,7 @@ import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import 
org.alfresco.util.AbstractLifecycleBean; +import org.alfresco.util.PropertyCheck; import org.alfresco.util.TempFileProvider; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -298,161 +298,29 @@ public class ImporterBootstrap extends AbstractLifecycleBean */ public void bootstrap() { - if (transactionService == null) - { - throw new ImporterException("Transaction Service must be provided"); - } - if (namespaceService == null) - { - throw new ImporterException("Namespace Service must be provided"); - } - if (nodeService == null) - { - throw new ImporterException("Node Service must be provided"); - } - if (importerService == null) - { - throw new ImporterException("Importer Service must be provided"); - } - if (storeRef == null) - { - if (logger.isDebugEnabled()) - { - logger.debug("No Store URL - bootstrap import ignored"); - } - return; - } + PropertyCheck.mandatory(this, "transactionService", transactionService); + PropertyCheck.mandatory(this, "namespaceService", namespaceService); + PropertyCheck.mandatory(this, "nodeService", nodeService); + PropertyCheck.mandatory(this, "importerService", importerService); + PropertyCheck.mandatory(this, "storeRef", storeRef); - UserTransaction userTransaction = transactionService.getUserTransaction(); - Authentication authentication = authenticationComponent.getCurrentAuthentication(); - if (authenticationComponent.getCurrentUserName() == null) + Authentication authentication = authenticationComponent.setSystemUserAsCurrentUser(); + + RetryingTransactionCallback doImportCallback = new RetryingTransactionCallback() { - authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName()); - } - + public Object execute() throws Throwable + { + doImport(); + return null; + } + }; try { - userTransaction.begin(); - - // check the repository exists, create if it doesn't - if (!performBootstrap()) - { - if (logger.isDebugEnabled()) - logger.debug("Store exists - bootstrap ignored: 
" + storeRef); - } - else if (!allowWrite) - { - // we're in read-only node - logger.warn("Store does not exist, but mode is read-only: " + storeRef); - } - else - { - // create the store if necessary - if (!nodeService.exists(storeRef)) - { - storeRef = nodeService.createStore(storeRef.getProtocol(), storeRef.getIdentifier()); - if (logger.isDebugEnabled()) - logger.debug("Created store: " + storeRef); - } - - // bootstrap the store contents - if (bootstrapViews != null) - { - // add-in any extended views - if (extensionBootstrapViews != null) - { - bootstrapViews.addAll(extensionBootstrapViews); - } - - for (Properties bootstrapView : bootstrapViews) - { - String view = bootstrapView.getProperty(VIEW_LOCATION_VIEW); - if (view == null || view.length() == 0) - { - throw new ImporterException("View file location must be provided"); - } - String encoding = bootstrapView.getProperty(VIEW_ENCODING); - - // Create appropriate view reader - Reader viewReader = null; - ACPImportPackageHandler acpHandler = null; - if (view.endsWith(".acp")) - { - File viewFile = getFile(view); - acpHandler = new ACPImportPackageHandler(viewFile, encoding); - } - else - { - viewReader = getReader(view, encoding); - } - - // Create import location - Location importLocation = new Location(storeRef); - String path = bootstrapView.getProperty(VIEW_PATH_PROPERTY); - if (path != null && path.length() > 0) - { - importLocation.setPath(path); - } - String childAssocType = bootstrapView.getProperty(VIEW_CHILDASSOCTYPE_PROPERTY); - if (childAssocType != null && childAssocType.length() > 0) - { - importLocation.setChildAssocType(QName.createQName(childAssocType, namespaceService)); - } - - // Create import binding - BootstrapBinding binding = new BootstrapBinding(); - binding.setConfiguration(configuration); - binding.setLocation(importLocation); - String messages = bootstrapView.getProperty(VIEW_MESSAGES_PROPERTY); - if (messages != null && messages.length() > 0) - { - Locale bindingLocale = (locale 
== null) ? I18NUtil.getLocale() : locale; - ResourceBundle bundle = ResourceBundle.getBundle(messages, bindingLocale); - binding.setResourceBundle(bundle); - } - - String uuidBinding = bootstrapView.getProperty(VIEW_UUID_BINDING); - if (uuidBinding != null && uuidBinding.length() > 0) - { - try - { - binding.setUUIDBinding(UUID_BINDING.valueOf(UUID_BINDING.class, uuidBinding)); - } - catch(IllegalArgumentException e) - { - throw new ImporterException("The value " + uuidBinding + " is an invalid uuidBinding"); - } - } - - // Now import... - ImporterProgress importProgress = null; - if (logger.isDebugEnabled()) - { - importProgress = new ImportTimerProgress(logger); - logger.debug("Importing " + view); - } - - if (viewReader != null) - { - importerService.importView(viewReader, importLocation, binding, importProgress); - } - else - { - importerService.importView(acpHandler, importLocation, binding, importProgress); - } - } - } - - // a bootstrap was performed - bootstrapPerformed = !useExistingStore; - } - userTransaction.commit(); + transactionService.getRetryingTransactionHelper().doInTransaction(doImportCallback); } catch(Throwable e) { - // rollback the transaction - try { if (userTransaction != null) {userTransaction.rollback();} } catch (Throwable ex) {} throw new AlfrescoRuntimeException("Bootstrap failed", e); } finally @@ -461,6 +329,125 @@ public class ImporterBootstrap extends AbstractLifecycleBean } } + /** + * Perform the actual import work. This is just separated to allow for simpler TXN demarcation. 
+ */ + private void doImport() throws Throwable + { + // check the repository exists, create if it doesn't + if (!performBootstrap()) + { + if (logger.isDebugEnabled()) + logger.debug("Store exists - bootstrap ignored: " + storeRef); + } + else if (!allowWrite) + { + // we're in read-only node + logger.warn("Store does not exist, but mode is read-only: " + storeRef); + } + else + { + // create the store if necessary + if (!nodeService.exists(storeRef)) + { + storeRef = nodeService.createStore(storeRef.getProtocol(), storeRef.getIdentifier()); + if (logger.isDebugEnabled()) + logger.debug("Created store: " + storeRef); + } + + // bootstrap the store contents + if (bootstrapViews != null) + { + // add-in any extended views + if (extensionBootstrapViews != null) + { + bootstrapViews.addAll(extensionBootstrapViews); + } + + for (Properties bootstrapView : bootstrapViews) + { + String view = bootstrapView.getProperty(VIEW_LOCATION_VIEW); + if (view == null || view.length() == 0) + { + throw new ImporterException("View file location must be provided"); + } + String encoding = bootstrapView.getProperty(VIEW_ENCODING); + + // Create appropriate view reader + Reader viewReader = null; + ACPImportPackageHandler acpHandler = null; + if (view.endsWith(".acp")) + { + File viewFile = getFile(view); + acpHandler = new ACPImportPackageHandler(viewFile, encoding); + } + else + { + viewReader = getReader(view, encoding); + } + + // Create import location + Location importLocation = new Location(storeRef); + String path = bootstrapView.getProperty(VIEW_PATH_PROPERTY); + if (path != null && path.length() > 0) + { + importLocation.setPath(path); + } + String childAssocType = bootstrapView.getProperty(VIEW_CHILDASSOCTYPE_PROPERTY); + if (childAssocType != null && childAssocType.length() > 0) + { + importLocation.setChildAssocType(QName.createQName(childAssocType, namespaceService)); + } + + // Create import binding + BootstrapBinding binding = new BootstrapBinding(); + 
binding.setConfiguration(configuration); + binding.setLocation(importLocation); + String messages = bootstrapView.getProperty(VIEW_MESSAGES_PROPERTY); + if (messages != null && messages.length() > 0) + { + Locale bindingLocale = (locale == null) ? I18NUtil.getLocale() : locale; + ResourceBundle bundle = ResourceBundle.getBundle(messages, bindingLocale); + binding.setResourceBundle(bundle); + } + + String uuidBinding = bootstrapView.getProperty(VIEW_UUID_BINDING); + if (uuidBinding != null && uuidBinding.length() > 0) + { + try + { + binding.setUUIDBinding(UUID_BINDING.valueOf(UUID_BINDING.class, uuidBinding)); + } + catch(IllegalArgumentException e) + { + throw new ImporterException("The value " + uuidBinding + " is an invalid uuidBinding"); + } + } + + // Now import... + ImporterProgress importProgress = null; + if (logger.isDebugEnabled()) + { + importProgress = new ImportTimerProgress(logger); + logger.debug("Importing " + view); + } + + if (viewReader != null) + { + importerService.importView(viewReader, importLocation, binding, importProgress); + } + else + { + importerService.importView(acpHandler, importLocation, binding, importProgress); + } + } + } + + // a bootstrap was performed + bootstrapPerformed = !useExistingStore; + } + } + /** * Get a Reader onto an XML view * diff --git a/source/java/org/alfresco/repo/jscript/ValueConverter.java b/source/java/org/alfresco/repo/jscript/ValueConverter.java index 1e96cc44c7..c6269ec087 100644 --- a/source/java/org/alfresco/repo/jscript/ValueConverter.java +++ b/source/java/org/alfresco/repo/jscript/ValueConverter.java @@ -208,14 +208,14 @@ public class ValueConverter } else { - // convert JavaScript map to values to a Map of Serializable objects + // convert Scriptable object of values to a Map of Serializable objects Object[] propIds = values.getIds(); Map propValues = new HashMap(propIds.length); for (int i=0; i propValues = new HashMap(propIds.length); + for (int i=0; i properties = 
nodeService.getProperties(rootNodeRef); + // Set an ML value to null + properties.put(BaseNodeServiceTest.PROP_QNAME_ML_TEXT_VALUE, null); + nodeService.setProperty(rootNodeRef, BaseNodeServiceTest.PROP_QNAME_ML_TEXT_VALUE, null); + // Get them again + Serializable mlTextSer = nodeService.getProperty(rootNodeRef, BaseNodeServiceTest.PROP_QNAME_ML_TEXT_VALUE); + MLText mlText = DefaultTypeConverter.INSTANCE.convert(MLText.class, mlTextSer); + assertNull("Value returned is not null", mlText); + } /** * {@inheritDoc} diff --git a/source/java/org/alfresco/repo/node/NodeRefPropertyMethodInterceptorTest.java b/source/java/org/alfresco/repo/node/NodeRefPropertyMethodInterceptorTest.java index f5f0d216fe..e1d9d923aa 100644 --- a/source/java/org/alfresco/repo/node/NodeRefPropertyMethodInterceptorTest.java +++ b/source/java/org/alfresco/repo/node/NodeRefPropertyMethodInterceptorTest.java @@ -236,21 +236,24 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest assertNotNull(nodeService.getProperty(n1, aspectNoderefProp)); assertNotNull(nodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).iterator().next() instanceof NodeRef); assertNull(nodeService.getProperties(n1).get(aspectCategoryProp)); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(0, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefProp)); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).iterator().next() instanceof NodeRef); // Set valid cat - mlAwareNodeService.setProperty(n1, aspectCategoryProp, cat); + 
mlAwareNodeService.setProperty(n1, aspectCategoryProp, cat.toString()); mlAwareNodeService.setProperty(n1, aspectCategoriesProp, cat); mlAwareNodeService.setProperty(n1, aspectNoderefProp, cat); mlAwareNodeService.setProperty(n1, aspectNoderefsProp, cat); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoryProp)); + assertTrue(nodeService.getProperty(n1, aspectCategoryProp) instanceof NodeRef); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefProp)); @@ -263,17 +266,22 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefsProp)); assertEquals(1, ((Collection) mlAwareNodeService.getProperties(n1).get(aspectNoderefsProp)).size()); assertNotNull(nodeService.getProperty(n1, aspectCategoryProp)); + assertTrue(nodeService.getProperty(n1, aspectCategoryProp) instanceof NodeRef); assertNotNull(nodeService.getProperty(n1, aspectCategoriesProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertTrue(((Collection) nodeService.getProperty(n1, aspectCategoriesProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperty(n1, aspectNoderefProp)); assertNotNull(nodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperties(n1).get(aspectCategoryProp)); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); + assertTrue(((Collection) 
nodeService.getProperties(n1).get(aspectCategoriesProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefProp)); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).iterator().next() instanceof NodeRef); // Set empty list @@ -340,10 +348,12 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest assertEquals(0, ((Collection) nodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(nodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(0, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).iterator().next() instanceof NodeRef); // set valid cat in list @@ -362,12 +372,16 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest assertEquals(1, ((Collection) mlAwareNodeService.getProperties(n1).get(aspectNoderefsProp)).size()); assertNotNull(nodeService.getProperty(n1, aspectCategoriesProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertTrue( ((Collection) nodeService.getProperty(n1, aspectCategoriesProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperty(n1, 
aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperty(n1, aspectNoderefsProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); + assertTrue(((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).iterator().next() instanceof NodeRef); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).size()); + assertTrue(((Collection) nodeService.getProperties(n1).get(aspectNoderefsProp)).iterator().next() instanceof NodeRef); // Test list with invalid, noderef and cat @@ -883,13 +897,13 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest NodeRef n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER, properties).getChildRef(); - assertNull(mlAwareNodeService.getProperty(n1, aspectCategoryProp)); + assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoryProp)); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertNull(mlAwareNodeService.getProperty(n1, aspectNoderefProp)); + assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefProp)); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertNull(mlAwareNodeService.getProperties(n1).get(aspectCategoryProp)); + assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectCategoryProp)); assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectCategoriesProp)); - assertNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefProp)); + assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefProp)); assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefsProp)); assertNull(nodeService.getProperty(n1, aspectCategoryProp)); 
assertNotNull(nodeService.getProperty(n1, aspectCategoriesProp)); @@ -914,11 +928,11 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest NodeRef n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER, properties).getChildRef(); - assertNull(mlAwareNodeService.getProperty(n1, aspectCategoryProp)); + assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoryProp)); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefProp)); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertNull(mlAwareNodeService.getProperties(n1).get(aspectCategoryProp)); + assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectCategoryProp)); assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectCategoriesProp)); assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefProp)); assertNotNull(mlAwareNodeService.getProperties(n1).get(aspectNoderefsProp)); @@ -1009,13 +1023,13 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest NodeRef n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER, properties).getChildRef(); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(0, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertEquals(0, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(0, ((Collection) 
mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertEquals(0, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(0, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); @@ -1041,11 +1055,11 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest NodeRef n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER, properties).getChildRef(); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(0, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(0, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); @@ -1107,13 +1121,13 @@ public class NodeRefPropertyMethodInterceptorTest extends BaseSpringTest NodeRef n1 = 
nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER, properties).getChildRef(); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(3, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertEquals(2, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertEquals(3, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectCategoriesProp)); - assertEquals(1, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); + assertEquals(3, ((Collection) mlAwareNodeService.getProperty(n1, aspectCategoriesProp)).size()); assertNotNull(mlAwareNodeService.getProperty(n1, aspectNoderefsProp)); - assertEquals(2, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); + assertEquals(3, ((Collection) mlAwareNodeService.getProperty(n1, aspectNoderefsProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectCategoriesProp)); assertEquals(1, ((Collection) nodeService.getProperties(n1).get(aspectCategoriesProp)).size()); assertNotNull(nodeService.getProperties(n1).get(aspectNoderefsProp)); diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java index b57e5cc4e0..3db5cdc8c7 100644 --- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java @@ -231,10 +231,16 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl try { if (tenantService.isEnabled()) - { - tenantService.checkDomain(storeRef.getIdentifier()); - storeRef = 
tenantService.getBaseName(storeRef); - } + { + String currentUser = AuthenticationUtil.getCurrentUserName(); + + // MT: return tenant stores only (although for super System return all stores - as used by ConfigurationChecker, IndexRecovery, IndexBackup etc) + if ((currentUser == null) || (! currentUser.equals(AuthenticationUtil.getSystemUserName()))) + { + tenantService.checkDomain(storeRef.getIdentifier()); + storeRef = tenantService.getBaseName(storeRef); + } + } storeRefs.add(storeRef); } diff --git a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java index 78e1cf41e3..690c89e2d4 100644 --- a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java +++ b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java @@ -26,11 +26,13 @@ package org.alfresco.repo.node.index; import java.util.ArrayList; import java.util.Collections; +import java.util.Date; import java.util.List; import org.alfresco.i18n.I18NUtil; import org.alfresco.model.ContentModel; import org.alfresco.repo.domain.Transaction; +import org.alfresco.repo.node.index.IndexTransactionTracker.IndexTransactionTrackerListener; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.service.cmr.repository.ChildAssociationRef; import org.alfresco.service.cmr.repository.NodeRef; @@ -52,6 +54,9 @@ import org.apache.commons.logging.LogFactory; public class FullIndexRecoveryComponent extends AbstractReindexComponent { private static final String ERR_INDEX_OUT_OF_DATE = "index.recovery.out_of_date"; + private static final String MSG_TRACKING_STARTING = "index.tracking.starting"; + private static final String MSG_TRACKING_COMPLETE = "index.tracking.complete"; + private static final String MSG_TRACKING_PROGRESS = "index.tracking.progress"; private static final String MSG_RECOVERY_STARTING = "index.recovery.starting"; private static 
final String MSG_RECOVERY_COMPLETE = "index.recovery.complete"; private static final String MSG_RECOVERY_PROGRESS = "index.recovery.progress"; @@ -190,8 +195,7 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent } else if (!endAllPresent) { - // Trigger the tracker, which will top up the indexes - indexTracker.reindex(); + performPartialRecovery(); } break; case VALIDATE: @@ -215,6 +219,43 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent } + private void performPartialRecovery() + { + // Log the AUTO recovery + IndexTransactionTrackerListener trackerListener = new IndexTransactionTrackerListener() + { + long lastLogged = 0L; + public void indexedTransactions(long fromTimeInclusive, long toTimeExclusive) + { + long now = System.currentTimeMillis(); + if (now - lastLogged < 10000L) + { + // Don't log more than once a minute + return; + } + lastLogged = now; + // Log it + Date toTimeDate = new Date(toTimeExclusive); + String msgAutoProgress = I18NUtil.getMessage(MSG_TRACKING_PROGRESS, toTimeDate.toString()); + logger.info(msgAutoProgress); + } + }; + try + { + // Register the listener + indexTracker.setListener(trackerListener); + // Trigger the tracker, which will top up the indexes + logger.info(I18NUtil.getMessage(MSG_TRACKING_STARTING)); + indexTracker.reindex(); + logger.info(I18NUtil.getMessage(MSG_TRACKING_COMPLETE)); + } + finally + { + // Remove the listener + indexTracker.setListener(null); + } + } + private static final int MAX_TRANSACTIONS_PER_ITERATION = 1000; private void performFullRecovery() { diff --git a/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java b/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java index 80aea03e93..f8879ab43c 100644 --- a/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java +++ b/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java @@ -39,6 +39,8 @@ public class IndexTransactionTracker extends 
AbstractReindexComponent { private static Log logger = LogFactory.getLog(IndexTransactionTracker.class); + private IndexTransactionTrackerListener listener; + private long maxTxnDurationMs; private long reindexLagMs; private int maxRecordSetSize; @@ -68,6 +70,11 @@ public class IndexTransactionTracker extends AbstractReindexComponent voids = new TreeMap(); } + public synchronized void setListener(IndexTransactionTrackerListener listener) + { + this.listener = listener; + } + /** * Set the expected maximum duration of transaction supported. This value is used to adjust the * look-back used to detect transactions that committed. Values must be greater than zero. @@ -149,6 +156,15 @@ public class IndexTransactionTracker extends AbstractReindexComponent // updated by this method. reindexTransactions(txns); + // Call the listener + synchronized (this) + { + if (listener != null) + { + listener.indexedTransactions(fromTimeInclusive, toTimeExclusive); + } + } + // Move the time on. // Note the subtraction here. Yes, it's odd. 
But the results of the getNextTransactions // may be limited by recordset size and it is possible to have multiple transactions share @@ -420,4 +436,15 @@ found: { private long txnCommitTime; } + + /** + * A callback that can be set to provide logging and other record keeping + * + * @author Derek Hulley + * @since 2.1.4 + */ + public interface IndexTransactionTrackerListener + { + void indexedTransactions(long fromTimeInclusive, long toTimeExclusive); + } } \ No newline at end of file diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java index be02a6439a..05826a72ca 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java @@ -183,6 +183,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp try { NodeRef childRef = relationshipRef.getChildRef(); + if(!childRef.getStoreRef().equals(store)) + { + throw new LuceneIndexException("Create node failed - node is not in the required store"); + } // If we have the root node we delete all other root nodes first if ((relationshipRef.getParentRef() == null) && tenantService.getBaseName(childRef).equals(nodeService.getRootNode(childRef.getStoreRef()))) @@ -249,6 +253,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS); try { + if(!nodeRef.getStoreRef().equals(store)) + { + throw new LuceneIndexException("Update node failed - node is not in the required store"); + } reindex(nodeRef, false); } catch (LuceneIndexException e) @@ -267,6 +275,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS); try { + if(!relationshipRef.getChildRef().getStoreRef().equals(store)) + { + throw new LuceneIndexException("Delete node failed - node is not in the 
required store"); + } // The requires a reindex - a delete may remove too much from under this node - that also lives under // other nodes via secondary associations. All the nodes below require reindex. // This is true if the deleted node is via secondary or primary assoc. @@ -290,6 +302,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp { // TODO: Optimise // reindex(relationshipRef.getParentRef()); + if(!relationshipRef.getChildRef().getStoreRef().equals(store)) + { + throw new LuceneIndexException("Create child relationship failed - node is not in the required store"); + } reindex(relationshipRef.getChildRef(), true); } catch (LuceneIndexException e) @@ -309,6 +325,14 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp try { // TODO: Optimise + if(!relationshipBeforeRef.getChildRef().getStoreRef().equals(store)) + { + throw new LuceneIndexException("Update child relationship failed - node is not in the required store"); + } + if(!relationshipAfterRef.getChildRef().getStoreRef().equals(store)) + { + throw new LuceneIndexException("Update child relationship failed - node is not in the required store"); + } if (relationshipBeforeRef.getParentRef() != null) { // reindex(relationshipBeforeRef.getParentRef()); @@ -331,6 +355,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS); try { + if(!relationshipRef.getChildRef().getStoreRef().equals(store)) + { + throw new LuceneIndexException("Delete child relationship failed - node is not in the required store"); + } // TODO: Optimise if (relationshipRef.getParentRef() != null) { @@ -547,8 +575,8 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp paths.addAll(categoryPaths); Document xdoc = new Document(); - xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); - xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), 
Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); + xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); boolean isAtomic = true; for (QName propertyName : properties.keySet()) { @@ -604,9 +632,9 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp qNameBuffer.append(";/"); } qNameBuffer.append(ISO9075.getXPathName(qNameRef.getQName())); - xdoc.add(new Field("PARENT", tenantService.getName(qNameRef.getParentRef()).toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("PARENT", tenantService.getName(qNameRef.getParentRef()).toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO)); - xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair.getSecond()), Field.Store.YES, Field.Index.UN_TOKENIZED, + xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? 
"" : ISO9075.getXPathName(pair.getSecond()), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); } } @@ -625,17 +653,17 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp if (directPaths.contains(pair.getFirst())) { Document directoryEntry = new Document(); - directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO)); for (NodeRef parent : getParents(pair.getFirst())) { - directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); } - directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef)))) { - directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); } docs.add(directoryEntry); @@ -649,12 +677,12 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp if (isRoot) { // TODO: Does the root element have a QName? 
- xdoc.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); xdoc.add(new Field("PATH", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO)); xdoc.add(new Field("QNAME", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO)); - xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), Field.Store.YES, Field.Index.NO, Field.TermVector.NO)); - xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); docs.add(xdoc); } @@ -666,31 +694,31 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp // true)); ChildAssociationRef primary = nodeService.getPrimaryParent(nodeRef); - xdoc.add(new Field("PRIMARYPARENT", tenantService.getName(primary.getParentRef()).toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("PRIMARYPARENT", tenantService.getName(primary.getParentRef()).toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO)); QName typeQName = nodeService.getType(nodeRef); - xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); for (QName classRef : nodeService.getAspects(nodeRef)) { - xdoc.add(new Field("ASPECT", 
ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO)); } - xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); - xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); + xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); if (isAtomic || indexAllProperties) { - xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); } else { if (isNew) { - xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); } else { - xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO)); + xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO)); } } diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java index 927467bc3d..7a17398264 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java @@ -58,6 +58,7 @@ import org.alfresco.repo.search.QueryParameterDefImpl; import org.alfresco.repo.search.QueryRegisterComponent; import org.alfresco.repo.search.impl.lucene.analysis.DateTimeAnalyser; import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer; +import org.alfresco.repo.search.impl.lucene.index.IndexInfo; 
import org.alfresco.repo.search.results.ChildAssocRefResultSet; import org.alfresco.repo.search.results.DetachedResultSet; import org.alfresco.repo.security.authentication.AuthenticationComponent; @@ -461,6 +462,49 @@ public class ADMLuceneTest extends TestCase super(arg0); } + public void testOverWritetoZeroSize() throws Exception + { + testTX.commit(); + testTX = transactionService.getUserTransaction(); + testTX.begin(); + luceneFTS.pause(); + buildBaseIndex(); + runBaseTests(); + luceneFTS.resume(); + testTX.commit(); + + for (int i = 0; i < 100; i++) + { + testTX = transactionService.getUserTransaction(); + testTX.begin(); + runBaseTests(); + nodeService.setProperty(rootNodeRef, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n1, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n2, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n3, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n4, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n5, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n6, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n7, QName.createQName("{namespace}property-A"), "A"); + runBaseTests(); + testTX.commit(); + + testTX = transactionService.getUserTransaction(); + testTX.begin(); + runBaseTests(); + nodeService.setProperty(n8, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n9, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n10, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n11, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n12, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n13, QName.createQName("{namespace}property-A"), "A"); + nodeService.setProperty(n14, QName.createQName("{namespace}property-A"), "A"); + 
runBaseTests(); + testTX.commit(); + } + + } + /** * Test bug fix * @@ -1538,7 +1582,7 @@ public class ADMLuceneTest extends TestCase sp7.addStore(rootNodeRef.getStoreRef()); sp7.setLanguage(SearchService.LANGUAGE_LUCENE); sp7.setQuery("PATH:\"//.\""); - sp7.addSort("@" + createdDate, true); + sp7.addSort("@" + createdDate.getPrefixedQName(namespacePrefixResolver), true); results = searcher.query(sp7); Date date = null; @@ -1573,7 +1617,7 @@ public class ADMLuceneTest extends TestCase date = currentBun; } results.close(); - + SearchParameters sp_7 = new SearchParameters(); sp_7.addStore(rootNodeRef.getStoreRef()); sp_7.setLanguage(SearchService.LANGUAGE_LUCENE); @@ -1613,7 +1657,6 @@ public class ADMLuceneTest extends TestCase date = currentBun; } results.close(); - // sort by double @@ -2571,8 +2614,7 @@ public class ADMLuceneTest extends TestCase { String startDate = df.format(new Date(testDate.getTime() - i)); // System.out.println("\tStart = " + startDate); - - + String endDate = df.format(new Date(testDate.getTime() + i)); // System.out.println("\tEnd = " + endDate); diff --git a/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerAndSearcherFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerAndSearcherFactory.java index a48acedc4d..d081bb1ae9 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerAndSearcherFactory.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerAndSearcherFactory.java @@ -1071,7 +1071,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI } /** - * Makes a backup of the source directory via a temporary folder + * Makes a backup of the source directory via a temporary folder. 
*/ private void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception { @@ -1111,6 +1111,14 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI } } + /** + * Note files can alter due to background processes so file not found is Ok + * + * @param srcDir + * @param destDir + * @param preserveFileDate + * @throws IOException + */ private void copyDirectory(File srcDir, File destDir, boolean preserveFileDate) throws IOException { if (destDir.exists()) @@ -1125,6 +1133,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI } if (preserveFileDate) { + // OL if file not found so does not need to check destDir.setLastModified(srcDir.lastModified()); } } @@ -1134,83 +1143,100 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI } File[] files = srcDir.listFiles(); - if (files == null) + if (files != null) { - throw new IOException(" No Access to " + srcDir); - } - for (int i = 0; i < files.length; i++) - { - File currentCopyTarget = new File(destDir, files[i].getName()); - if (files[i].isDirectory()) + for (int i = 0; i < files.length; i++) { - // Skip any temp index file - if (files[i].getName().equals(tempDir.getName())) + File currentCopyTarget = new File(destDir, files[i].getName()); + if (files[i].isDirectory()) { - // skip any temp back up directories - } - else if (files[i].getName().equals(targetDir.getName())) - { - // skip any back up directories + // Skip any temp index file + if (files[i].getName().equals(tempDir.getName())) + { + // skip any temp back up directories + } + else if (files[i].getName().equals(targetDir.getName())) + { + // skip any back up directories + } + else + { + copyDirectory(files[i], currentCopyTarget, preserveFileDate); + } } else { - copyDirectory(files[i], currentCopyTarget, preserveFileDate); + copyFile(files[i], currentCopyTarget, preserveFileDate); } } - else + } + else + { + if (logger.isDebugEnabled()) { - 
copyFile(files[i], currentCopyTarget, preserveFileDate); + logger.debug("Skipping transient directory " + srcDir); } } } private void copyFile(File srcFile, File destFile, boolean preserveFileDate) throws IOException { - if (destFile.exists()) - { - throw new IOException("File shoud not exist " + destFile); - } - - FileInputStream input = new FileInputStream(srcFile); try { - FileOutputStream output = new FileOutputStream(destFile); + if (destFile.exists()) + { + throw new IOException("File shoud not exist " + destFile); + } + + FileInputStream input = new FileInputStream(srcFile); try { - copy(input, output); + FileOutputStream output = new FileOutputStream(destFile); + try + { + copy(input, output); + } + finally + { + try + { + output.close(); + } + catch (IOException io) + { + + } + } } finally { try { - output.close(); + input.close(); } catch (IOException io) { } } - } - finally - { - try - { - input.close(); - } - catch (IOException io) - { + // check copy + if (srcFile.length() != destFile.length()) + { + throw new IOException("Failed to copy full from '" + srcFile + "' to '" + destFile + "'"); + } + if (preserveFileDate) + { + destFile.setLastModified(srcFile.lastModified()); } } - - // check copy - if (srcFile.length() != destFile.length()) + catch (FileNotFoundException fnfe) { - throw new IOException("Failed to copy full from '" + srcFile + "' to '" + destFile + "'"); - } - if (preserveFileDate) - { - destFile.setLastModified(srcFile.lastModified()); + // ignore as files can go + if (logger.isDebugEnabled()) + { + logger.debug("Skipping transient file " + srcFile); + } } } @@ -1247,7 +1273,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI for (int i = 0; i < files.length; i++) { File file = files[i]; - + if (file.isDirectory()) { deleteDirectory(file); @@ -1392,6 +1418,11 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI { return indexer.doWithWriteLock(lockWork); } + + public boolean 
canRetry() + { + return false; + } } private static class CoreLockWork implements IndexInfo.LockWork @@ -1430,21 +1461,31 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI } } } + + public boolean canRetry() + { + return false; + } }); } + + public boolean canRetry() + { + return false; + } } public static void main(String[] args) throws IOException { // delete a directory .... - if(args.length != 1) + if (args.length != 1) { return; } File file = new File(args[0]); deleteDirectory(file); } - + public static void deleteDirectory(File directory) throws IOException { if (!directory.exists()) @@ -1465,9 +1506,9 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI for (int i = 0; i < files.length; i++) { File file = files[i]; - + System.out.println("."); - //System.out.println("Deleting "+file.getCanonicalPath()); + // System.out.println("Deleting "+file.getCanonicalPath()); if (file.isDirectory()) { deleteDirectory(file); diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java b/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java index a7c91bc897..23e5c63a96 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java @@ -48,6 +48,7 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.Set; import java.util.Timer; import java.util.TimerTask; @@ -229,12 +230,16 @@ public class IndexInfo */ private ConcurrentLinkedQueue deleteQueue = new ConcurrentLinkedQueue(); + private ConcurrentLinkedQueue deleteFails = new ConcurrentLinkedQueue(); + /** * A queue of reference counting index readers. We wait for these to become unused (ref count falls to zero) then * the data can be removed. 
*/ private ConcurrentLinkedQueue deletableReaders = new ConcurrentLinkedQueue(); + private ConcurrentLinkedQueue waitingReaders = new ConcurrentLinkedQueue(); + /** * The call that is responsible for deleting old index information from disk. */ @@ -476,6 +481,11 @@ public class IndexInfo } return null; } + + public boolean canRetry() + { + return false; + } }); } finally @@ -577,6 +587,11 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return false; + } + }); } finally @@ -611,7 +626,10 @@ public class IndexInfo private class DeleteUnknownGuidDirectories implements LockWork { - + public boolean canRetry() + { + return true; + } public Object doWork() throws Exception { setStatusFromFile(); @@ -720,11 +738,11 @@ public class IndexInfo { indexEntries.put(id, new IndexEntry(IndexType.DELTA, id, "", TransactionStatus.ACTIVE, "", 0, 0, false)); } + // Downgrade lock + getReadLock(); } finally { - // Downgrade lock - getReadLock(); releaseWriteLock(); } } @@ -857,8 +875,7 @@ public class IndexInfo } /** - * Get the deletions for a given index (there is not check if thery should be applied that is up to the calling - * layer) + * Get the deletions for a given index (there is no check if they should be applied that is up to the calling layer) * * @param id * @return @@ -980,10 +997,10 @@ public class IndexInfo try { mainIndexReader = null; + getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } } @@ -1005,14 +1022,19 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return true; + } + }); mainIndexReader = createMainIndexReader(); } + getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } } @@ -1056,10 +1078,10 @@ public class IndexInfo try { mainIndexReader = null; + getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } } @@ -1080,14 +1102,19 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return true; + } + }); mainIndexReader = createMainIndexReader(); } + 
getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } } @@ -1162,6 +1189,11 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return true; + } + }); } else @@ -1178,10 +1210,10 @@ public class IndexInfo } dumpInfo(); } + getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } } @@ -1392,12 +1424,26 @@ public class IndexInfo tl.set(buildReferenceCountingIndexReader(id)); } + /** + * This has to be protected to allow for retry + */ public void transition(String id, Set toDelete, Set read) throws IOException { IndexEntry entry = indexEntries.get(id); if (entry == null) { - throw new IndexerException("Unknown transaction " + id); + // We could be retrying - see if the index reader is known or the directory is left + if (referenceCountingReadOnlyIndexReaders.get(id) == null) + { + File location = new File(indexDirectory, id).getCanonicalFile(); + if (!location.exists()) + { + throw new IndexerException("Unknown transaction " + id); + } + } + + clearOldReaders(); + cleaner.schedule(); } if (TransactionStatus.COMMITTED.follows(entry.getStatus())) @@ -1494,7 +1540,18 @@ public class IndexInfo IndexEntry entry = indexEntries.get(id); if (entry == null) { - throw new IndexerException("Unknown transaction " + id); + // We could be retrying - see if the index reader is known or the directory is left + if (referenceCountingReadOnlyIndexReaders.get(id) == null) + { + File location = new File(indexDirectory, id).getCanonicalFile(); + if (!location.exists()) + { + throw new IndexerException("Unknown transaction " + id); + } + } + + clearOldReaders(); + cleaner.schedule(); } if (TransactionStatus.ROLLEDBACK.follows(entry.getStatus())) @@ -1535,7 +1592,18 @@ public class IndexInfo IndexEntry entry = indexEntries.get(id); if (entry == null) { - throw new IndexerException("Unknown transaction " + id); + // We could be retrying - see if the index reader is known or the directory is left + if (referenceCountingReadOnlyIndexReaders.get(id) 
== null) + { + File location = new File(indexDirectory, id).getCanonicalFile(); + if (!location.exists()) + { + throw new IndexerException("Unknown transaction " + id); + } + } + + clearOldReaders(); + cleaner.schedule(); } if (TransactionStatus.DELETABLE.follows(entry.getStatus())) @@ -2072,6 +2140,8 @@ public class IndexInfo public interface LockWork { public Result doWork() throws Exception; + + public boolean canRetry(); } public R doWithWriteLock(LockWork lockWork) @@ -2087,15 +2157,76 @@ public class IndexInfo } } + private static final int CHANNEL_OPEN_RETRIES = 5; + private R doWithFileLock(LockWork lockWork) + { + try + { + return doWithFileLock(lockWork, CHANNEL_OPEN_RETRIES); + } + catch (Throwable e) + { + // Re-throw the exception + if (e instanceof RuntimeException) + { + throw (RuntimeException) e; + } + else + { + throw new RuntimeException("Error during run with lock.", e); + } + } + } + + /** + * Specific exception to catch channel close issues. + * + * @author Derek Hulley + * @since 2.1.3 + */ + private static class IndexInfoChannelException extends IOException + { + /** + * + */ + private static final long serialVersionUID = 1588898991653057286L; + + public IndexInfoChannelException(String msg) + { + super(msg); + } + } + + /** + * An iterative method that retries the operation in the event of the channel being closed. + * + * @param retriesRemaining + * the number of retries remaining + * @return Returns the lock work result + */ + private R doWithFileLock(LockWork lockWork, int retriesRemaining) throws Throwable { FileLock fileLock = null; R result = null; + long start = 0L; try { + // Check that the channel is open + if (!indexInfoChannel.isOpen()) + { + if (lockWork.canRetry()) + { + throw new IndexInfoChannelException("Channel is closed. Manually triggering reopen attempts"); + } + else + { + reopenChannels(); + } + } + if (indexIsShared) { - long start = 0l; if (s_logger.isDebugEnabled()) { s_logger.debug(" ... 
waiting for file lock"); @@ -2115,17 +2246,39 @@ public class IndexInfo result = lockWork.doWork(); return result; } - catch (Throwable exception) + catch (IOException e) { - - // Re-throw the exception - if (exception instanceof RuntimeException) + if (!lockWork.canRetry()) { - throw (RuntimeException) exception; + // We've done our best + s_logger.warn("This operation can not retry upon an IOException - it has to roll back to its previous state"); + throw e; + } + if (retriesRemaining == 0) + { + // We've done our best + s_logger.warn("No more channel open retries remaining"); + throw e; } else { - throw new RuntimeException("Error during run with lock.", exception); + // Attempt to reopen the channel + if (s_logger.isDebugEnabled()) + { + s_logger.debug("\n" + "Channel is closed. Will attempt to open it. \n" + " Retries remaining: " + retriesRemaining); + } + try + { + reopenChannels(); + // Loop around and try again + return doWithFileLock(lockWork, --retriesRemaining); + } + catch (Throwable ee) + { + // Report this error, but throw the original + s_logger.error("Channel reopen failed on index info files in: " + this.indexDirectory, ee); + throw e; + } } } finally @@ -2135,18 +2288,52 @@ public class IndexInfo try { fileLock.release(); + long end = System.nanoTime(); if (s_logger.isDebugEnabled()) { - s_logger.debug(" ... released file lock"); + s_logger.debug(" ... released file lock after " + ((end - start) / 10e6f) + " ms"); } } catch (IOException e) { + s_logger.warn("Failed to release file lock: " + e.getMessage(), e); } } } } + /** + * Reopens all the channels. The channels are closed first. This method is synchronized. 
+ */ + private synchronized void reopenChannels() throws Throwable + { + try + { + indexInfoRAF.close(); + } + catch (IOException e) + { + s_logger.warn("Failed to close indexInfoRAF", e); + } + try + { + indexInfoBackupRAF.close(); + } + catch (IOException e) + { + s_logger.warn("Failed to close indexInfoRAF", e); + } + File indexInfoFile = new File(this.indexDirectory, INDEX_INFO); + File indexInfoBackupFile = new File(this.indexDirectory, INDEX_INFO_BACKUP); + + // Open the files and channels for the index info file and the backup + this.indexInfoRAF = openFile(indexInfoFile); + this.indexInfoChannel = this.indexInfoRAF.getChannel(); + + this.indexInfoBackupRAF = openFile(indexInfoBackupFile); + this.indexInfoBackupChannel = this.indexInfoBackupRAF.getChannel(); + } + /** * Helper to print out index information * @@ -2196,13 +2383,16 @@ public class IndexInfo private class Cleaner extends AbstractSchedulable { - public void run() + String getLogName() + { + return "Index cleaner"; + } + + ExitState runImpl() { - // Add any closed index readers we were waiting for - HashSet waiting = new HashSet(); IndexReader reader; - while ((reader = deletableReaders.poll()) != null) + while ((reader = deletableReaders.peek()) != null) { ReferenceCounting refCounting = (ReferenceCounting) reader; if (refCounting.getReferenceCount() == 0) @@ -2229,14 +2419,15 @@ public class IndexInfo } else { - waiting.add(reader); + waitingReaders.add(reader); } + deletableReaders.remove(); } - deletableReaders.addAll(waiting); + deletableReaders.addAll(waitingReaders); + waitingReaders.clear(); String id = null; - HashSet fails = new HashSet(); - while ((id = deleteQueue.poll()) != null) + while ((id = deleteQueue.peek()) != null) { try { @@ -2254,18 +2445,24 @@ public class IndexInfo s_logger.debug("DELETE FAILED"); } // try again later - fails.add(id); + deleteFails.add(id); } + deleteQueue.remove(); } catch (IOException ioe) { s_logger.warn("Failed to delete file - invalid canonical 
file", ioe); - fails.add(id); + deleteFails.add(id); } } - deleteQueue.addAll(fails); + deleteQueue.addAll(deleteFails); + deleteFails.clear(); + return ExitState.DONE; + } - done(); + ExitState recoverImpl() + { + return ExitState.DONE; } private boolean deleteDirectory(File file) @@ -2314,182 +2511,401 @@ public class IndexInfo NONE, MERGE_INDEX, APPLY_DELTA_DELETION, MERGE_DELTA } + private enum ScheduledState + { + UN_SCHEDULED, SCHEDULED, FAILED, RECOVERY_SCHEDULED + } + + private enum ExitState + { + DONE, RESCHEDULE; + } + private abstract class AbstractSchedulable implements Schedulable, Runnable { - - boolean scheduled = false; + ScheduledState scheduledState = ScheduledState.UN_SCHEDULED; public synchronized void schedule() { - if (!scheduled) + switch (scheduledState) { + case FAILED: + scheduledState = ScheduledState.RECOVERY_SCHEDULED; threadPoolExecutor.execute(this); - scheduled = true; - } - else - { - // already done + break; + case UN_SCHEDULED: + scheduledState = ScheduledState.SCHEDULED; + threadPoolExecutor.execute(this); + break; + case RECOVERY_SCHEDULED: + case SCHEDULED: + default: + // Nothing to do + break; } } - public synchronized void done() + private synchronized void done() { - if (scheduled) - { - scheduled = false; - } - else + switch (scheduledState) { + case RECOVERY_SCHEDULED: + case SCHEDULED: + scheduledState = ScheduledState.UN_SCHEDULED; + break; + case FAILED: + case UN_SCHEDULED: + default: throw new IllegalStateException(); } } - public synchronized void reschedule() + private synchronized void reschedule() { - if (scheduled) + switch (scheduledState) { + case RECOVERY_SCHEDULED: + scheduledState = ScheduledState.SCHEDULED; + case SCHEDULED: threadPoolExecutor.execute(this); - } - else - { + break; + case FAILED: + case UN_SCHEDULED: + default: + throw new IllegalStateException(); + } + } + + private synchronized void rescheduleRecovery() + { + switch (scheduledState) + { + case RECOVERY_SCHEDULED: + 
threadPoolExecutor.execute(this); + break; + case SCHEDULED: + case FAILED: + case UN_SCHEDULED: + default: throw new IllegalStateException(); } } - } - private class Merger extends AbstractSchedulable - { + private synchronized void fail() + { + switch (scheduledState) + { + case RECOVERY_SCHEDULED: + case SCHEDULED: + scheduledState = ScheduledState.FAILED; + break; + case FAILED: + case UN_SCHEDULED: + default: + throw new IllegalStateException(); + } + } public void run() { - try { - // Get the read local to decide what to do - // Single JVM to start with - MergeAction action = MergeAction.NONE; - - getReadLock(); - try + ExitState reschedule; + switch (scheduledState) { - if (indexIsShared && !checkVersion()) + case RECOVERY_SCHEDULED: + reschedule = recoverImpl(); + s_logger.error(getLogName() + " has recovered - resuming ... "); + if (reschedule == ExitState.RESCHEDULE) { - releaseReadLock(); - getWriteLock(); - try - { - // Sync with disk image if required - doWithFileLock(new LockWork() - { - public Object doWork() throws Exception - { - return null; - } - }); - } - finally - { - try - { - getReadLock(); - } - finally - { - releaseWriteLock(); - } - } + rescheduleRecovery(); + break; } - - int indexes = 0; - boolean mergingIndexes = false; - int deltas = 0; - boolean applyingDeletions = false; - - for (IndexEntry entry : indexEntries.values()) + case SCHEDULED: + reschedule = runImpl(); + if (reschedule == ExitState.RESCHEDULE) { - if (entry.getType() == IndexType.INDEX) - { - indexes++; - if (entry.getStatus() == TransactionStatus.MERGE) - { - mergingIndexes = true; - } - } - else if (entry.getType() == IndexType.DELTA) - { - if (entry.getStatus() == TransactionStatus.COMMITTED) - { - deltas++; - } - if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING) - { - applyingDeletions = true; - } - } + reschedule(); } - - if (s_logger.isDebugEnabled()) + else { - s_logger.debug("Indexes = " + indexes); - s_logger.debug("Merging = " + mergingIndexes); - 
s_logger.debug("Deltas = " + deltas); - s_logger.debug("Deleting = " + applyingDeletions); + done(); } - - if (!mergingIndexes && !applyingDeletions) - { - - if ((indexes > mergerMergeFactor) || (deltas > mergerTargetOverlays)) - { - if (indexes > deltas) - { - // Try merge - action = MergeAction.MERGE_INDEX; - } - else - { - // Try delete - action = MergeAction.APPLY_DELTA_DELETION; - - } - } - } - } - - catch (IOException e) - { - s_logger.error("Error reading index file", e); - } - finally - { - releaseReadLock(); - } - - if (action == MergeAction.APPLY_DELTA_DELETION) - { - mergeDeletions(); - } - else if (action == MergeAction.MERGE_INDEX) - { - mergeIndexes(); - } - - if (action == MergeAction.NONE) - { - done(); - } - else - { - reschedule(); + break; + case FAILED: + case UN_SCHEDULED: + default: + throw new IllegalStateException(); } } catch (Throwable t) { - s_logger.error("??", t); + try + { + if (s_logger.isWarnEnabled()) + { + s_logger.warn(getLogName() + " failed with ", t); + } + recoverImpl(); + if (s_logger.isWarnEnabled()) + { + s_logger.warn(getLogName() + " recovered from ", t); + } + done(); + } + catch (Throwable rbt) + { + fail(); + s_logger.error(getLogName() + " failed to recover - suspending ", rbt); + } } } - void mergeDeletions() + abstract ExitState runImpl() throws Exception; + + abstract ExitState recoverImpl() throws Exception; + + abstract String getLogName(); + } + + private class Merger extends AbstractSchedulable + { + String getLogName() + { + return "Index merger"; + } + + ExitState runImpl() throws IOException + { + + // Get the read local to decide what to do + // Single JVM to start with + MergeAction action = MergeAction.NONE; + + getReadLock(); + try + { + if (indexIsShared && !checkVersion()) + { + releaseReadLock(); + getWriteLock(); + try + { + // Sync with disk image if required + doWithFileLock(new LockWork() + { + public Object doWork() throws Exception + { + return null; + } + + public boolean canRetry() + { + 
return true; + } + }); + getReadLock(); + } + finally + { + releaseWriteLock(); + } + } + + int indexes = 0; + boolean mergingIndexes = false; + int deltas = 0; + boolean applyingDeletions = false; + + for (IndexEntry entry : indexEntries.values()) + { + if (entry.getType() == IndexType.INDEX) + { + indexes++; + if ((entry.getStatus() == TransactionStatus.MERGE) || (entry.getStatus() == TransactionStatus.MERGE_TARGET)) + { + mergingIndexes = true; + } + + } + else if (entry.getType() == IndexType.DELTA) + { + if (entry.getStatus() == TransactionStatus.COMMITTED) + { + deltas++; + } + if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING) + { + applyingDeletions = true; + deltas++; + } + } + } + + if (s_logger.isDebugEnabled()) + { + s_logger.debug("Indexes = " + indexes); + s_logger.debug("Merging = " + mergingIndexes); + s_logger.debug("Deltas = " + deltas); + s_logger.debug("Deleting = " + applyingDeletions); + } + + if (!mergingIndexes && !applyingDeletions) + { + + if ((indexes > mergerMergeFactor) || (deltas > mergerTargetOverlays)) + { + if (indexes > deltas) + { + // Try merge + action = MergeAction.MERGE_INDEX; + } + else + { + // Try delete + action = MergeAction.APPLY_DELTA_DELETION; + + } + } + } + } + + catch (IOException e) + { + s_logger.error("Error reading index file", e); + return ExitState.DONE; + } + finally + { + releaseReadLock(); + } + + if (action == MergeAction.APPLY_DELTA_DELETION) + { + mergeDeletions(); + } + else if (action == MergeAction.MERGE_INDEX) + { + mergeIndexes(); + } + + if (action == MergeAction.NONE) + { + return ExitState.DONE; + } + else + { + return ExitState.RESCHEDULE; + } + } + + ExitState recoverImpl() + { + getWriteLock(); + try + { + doWithFileLock(new LockWork() + { + public Object doWork() throws Exception + { + setStatusFromFile(); + + // If the index is not shared we can do some easy clean + // up + if (!indexIsShared) + { + HashSet deletable = new HashSet(); + // clean up + for (IndexEntry entry : 
indexEntries.values()) + { + switch (entry.getStatus()) + { + // states which can be deleted + // We could check prepared states can be + // committed. + case ACTIVE: + case MARKED_ROLLBACK: + case NO_TRANSACTION: + case PREPARING: + case ROLLEDBACK: + case ROLLINGBACK: + case UNKNOWN: + case PREPARED: + case DELETABLE: + case COMMITTING: + case COMMITTED: + default: + if (s_logger.isInfoEnabled()) + { + s_logger.info("Roll back merge: leaving index entry " + entry); + } + break; + // States which are in mid-transition which we + // can roll back to the committed state + case COMMITTED_DELETING: + case MERGE: + if (s_logger.isInfoEnabled()) + { + s_logger.info("Roll back merge: Resetting merge and committed_deleting to committed " + entry); + } + entry.setStatus(TransactionStatus.COMMITTED); + break; + case MERGE_TARGET: + if (s_logger.isInfoEnabled()) + { + s_logger.info("Roll back merge: Deleting merge target " + entry); + } + entry.setStatus(TransactionStatus.DELETABLE); + deletable.add(entry.getName()); + break; + } + + // Check we have a reader registered + if (referenceCountingReadOnlyIndexReaders.get(entry.getName()) == null) + { + registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName())); + } + } + + if (mainIndexReader != null) + { + ReferenceCounting rcMain = (ReferenceCounting) mainIndexReader; + if (rcMain.isInvalidForReuse()) + { + mainIndexReader = null; + } + } + + // Delete entries that are not required + for (String id : deletable) + { + indexEntries.remove(id); + } + clearOldReaders(); + + cleaner.schedule(); + + // persist the new state + writeStatus(); + } + return null; + } + + public boolean canRetry() + { + return false; + } + + }); + } + finally + { + releaseWriteLock(); + } + return ExitState.DONE; + } + + void mergeDeletions() throws IOException { if (s_logger.isDebugEnabled()) { @@ -2498,6 +2914,7 @@ public class IndexInfo // lock for deletions final LinkedHashMap toDelete; + LinkedHashMap 
indexes; getWriteLock(); try @@ -2514,13 +2931,17 @@ public class IndexInfo { return set; } + if ((entry.getType() == IndexType.INDEX) && (entry.getStatus() == TransactionStatus.MERGE_TARGET)) + { + return set; + } if ((entry.getType() == IndexType.DELTA) && (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)) { return set; } } // Check it is not deleting - for (IndexEntry entry : indexEntries.values()) + BREAK: for (IndexEntry entry : indexEntries.values()) { // skip indexes at the start if (entry.getType() == IndexType.DELTA) @@ -2534,7 +2955,7 @@ public class IndexInfo { // If not committed we stop as we can not // span non committed. - break; + break BREAK; } } } @@ -2546,22 +2967,27 @@ public class IndexInfo } + public boolean canRetry() + { + return false; + } + }); + getReadLock(); } finally { - getReadLock(); releaseWriteLock(); } - LinkedHashMap indexes = new LinkedHashMap(); try { - for (IndexEntry entry : indexEntries.values()) + indexes = new LinkedHashMap(); + BREAK: for (IndexEntry entry : indexEntries.values()) { if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING) { - break; + break BREAK; } indexes.put(entry.getName(), entry); } @@ -2571,147 +2997,133 @@ public class IndexInfo releaseReadLock(); } + if (toDelete.size() == 0) + { + return; + } // Build readers - boolean fail = false; - final HashSet invalidIndexes = new HashSet(); final HashMap newIndexCounts = new HashMap(); - try + LinkedHashMap readers = new LinkedHashMap(); + for (IndexEntry entry : indexes.values()) { - LinkedHashMap readers = new LinkedHashMap(); - for (IndexEntry entry : indexes.values()) + File location = new File(indexDirectory, entry.getName()).getCanonicalFile(); + IndexReader reader; + if (IndexReader.indexExists(location)) { - File location = new File(indexDirectory, entry.getName()).getCanonicalFile(); - IndexReader reader; - if (IndexReader.indexExists(location)) - { - reader = IndexReader.open(location); - } - else - { - reader = 
IndexReader.open(emptyIndex); - } - readers.put(entry.getName(), reader); + reader = IndexReader.open(location); } - - for (IndexEntry currentDelete : toDelete.values()) + else { - Set deletions = getDeletions(currentDelete.getName()); - for (String key : readers.keySet()) + reader = IndexReader.open(emptyIndex); + } + readers.put(entry.getName(), reader); + } + + for (IndexEntry currentDelete : toDelete.values()) + { + Set deletions = getDeletions(currentDelete.getName()); + for (String key : readers.keySet()) + { + IndexReader reader = readers.get(key); + for (String stringRef : deletions) { - IndexReader reader = readers.get(key); - for (String stringRef : deletions) + if (currentDelete.isDeletOnlyNodes()) { - if (currentDelete.isDeletOnlyNodes()) - { - Searcher searcher = new IndexSearcher(reader); + Searcher searcher = new IndexSearcher(reader); - TermQuery query = new TermQuery(new Term("ID", stringRef)); - Hits hits = searcher.search(query); - if (hits.length() > 0) + TermQuery query = new TermQuery(new Term("ID", stringRef)); + Hits hits = searcher.search(query); + if (hits.length() > 0) + { + for (int i = 0; i < hits.length(); i++) { - for (int i = 0; i < hits.length(); i++) + Document doc = hits.doc(i); + if (doc.getField("ISCONTAINER") == null) { - Document doc = hits.doc(i); - if (doc.getField("ISCONTAINER") == null) - { - reader.deleteDocument(hits.id(i)); - invalidIndexes.add(key); - // There should only be one thing to - // delete - // break; - } + reader.deleteDocument(hits.id(i)); + invalidIndexes.add(key); + // There should only be one thing to + // delete + // break; } } - searcher.close(); - } - else + searcher.close(); + + } + else + { + int deletedCount = 0; + try { - int deletedCount = 0; - try + deletedCount = reader.deleteDocuments(new Term("ID", stringRef)); + } + catch (IOException ioe) + { + if (s_logger.isDebugEnabled()) { - deletedCount = reader.deleteDocuments(new Term("ID", stringRef)); + s_logger.debug("IO Error for " + key); + 
throw ioe; } - catch (IOException ioe) + } + if (deletedCount > 0) + { + if (s_logger.isDebugEnabled()) { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("IO Error for " + key); - throw ioe; - } - } - if (deletedCount > 0) - { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("Deleted " + deletedCount + " from " + key + " for id " + stringRef + " remaining docs " + reader.numDocs()); - } - invalidIndexes.add(key); + s_logger.debug("Deleted " + deletedCount + " from " + key + " for id " + stringRef + " remaining docs " + reader.numDocs()); } + invalidIndexes.add(key); } } + } - } - File location = new File(indexDirectory, currentDelete.getName()).getCanonicalFile(); - IndexReader reader; - if (IndexReader.indexExists(location)) - { - reader = IndexReader.open(location); - } - else - { - reader = IndexReader.open(emptyIndex); - } - readers.put(currentDelete.getName(), reader); } + File location = new File(indexDirectory, currentDelete.getName()).getCanonicalFile(); + IndexReader reader; + if (IndexReader.indexExists(location)) + { + reader = IndexReader.open(location); + } + else + { + reader = IndexReader.open(emptyIndex); + } + readers.put(currentDelete.getName(), reader); + } - // Close all readers holding the write lock - so no one tries to - // read - getWriteLock(); - try + // Close all readers holding the write lock - so no one tries to + // read + getWriteLock(); + try + { + for (String key : readers.keySet()) { - for (String key : readers.keySet()) - { - IndexReader reader = readers.get(key); - // TODO:Set the new document count - newIndexCounts.put(key, new Long(reader.numDocs())); - reader.close(); - } - } - finally - { - releaseWriteLock(); + IndexReader reader = readers.get(key); + // TODO:Set the new document count + newIndexCounts.put(key, new Long(reader.numDocs())); + reader.close(); } } - catch (IOException e) + finally { - s_logger.error("Failed to merge deletions", e); - fail = true; + releaseWriteLock(); } // Prebuild all readers for 
affected indexes // Register them in the commit. final HashMap newReaders = new HashMap(); - try + + for (String id : invalidIndexes) { - for (String id : invalidIndexes) - { - IndexReader reader = buildReferenceCountingIndexReader(id); - newReaders.put(id, reader); - } - } - catch (IOException ioe) - { - s_logger.error("Failed build new readers", ioe); - fail = true; + IndexReader reader = buildReferenceCountingIndexReader(id); + newReaders.put(id, reader); } - final boolean wasDeleted = !fail; getWriteLock(); try { @@ -2722,12 +3134,8 @@ public class IndexInfo for (IndexEntry entry : toDelete.values()) { entry.setStatus(TransactionStatus.COMMITTED); - if (wasDeleted) - { - entry.setType(IndexType.INDEX); - entry.setDeletions(0); - } - + entry.setType(IndexType.INDEX); + entry.setDeletions(0); } for (String key : newIndexCounts.keySet()) @@ -2791,18 +3199,20 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return false; + } + }); } finally { releaseWriteLock(); } - - // TODO: Flush readers etc - } - void mergeIndexes() + void mergeIndexes() throws IOException { if (s_logger.isDebugEnabled()) @@ -2827,6 +3237,10 @@ public class IndexInfo { return set; } + if ((entry.getType() == IndexType.INDEX) && (entry.getStatus() == TransactionStatus.MERGE_TARGET)) + { + return set; + } if ((entry.getType() == IndexType.DELTA) && (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)) { return set; @@ -2881,6 +3295,11 @@ public class IndexInfo } + public boolean canRetry() + { + return false; + } + }); } finally @@ -2893,95 +3312,90 @@ public class IndexInfo s_logger.debug("....Merging..." 
+ (toMerge.size() - 1)); } - boolean fail = false; + if (toMerge.size() == 0) + { + return; + } String mergeTargetId = null; - try + if (toMerge.size() > 0) { - if (toMerge.size() > 0) + int count = 0; + IndexReader[] readers = new IndexReader[toMerge.size() - 1]; + RAMDirectory ramDirectory = null; + IndexWriter writer = null; + long docCount = 0; + File outputLocation = null; + for (IndexEntry entry : toMerge.values()) { - int count = 0; - IndexReader[] readers = new IndexReader[toMerge.size() - 1]; - RAMDirectory ramDirectory = null; - IndexWriter writer = null; - long docCount = 0; - File outputLocation = null; - for (IndexEntry entry : toMerge.values()) + File location = new File(indexDirectory, entry.getName()).getCanonicalFile(); + if (entry.getStatus() == TransactionStatus.MERGE) { - File location = new File(indexDirectory, entry.getName()).getCanonicalFile(); - if (entry.getStatus() == TransactionStatus.MERGE) + IndexReader reader; + if (IndexReader.indexExists(location)) { - IndexReader reader; - if (IndexReader.indexExists(location)) - { - reader = IndexReader.open(location); - } - else - { - s_logger.error("Index is missing " + entry.getName()); - reader = IndexReader.open(emptyIndex); - } - readers[count++] = reader; - docCount += entry.getDocumentCount(); + reader = IndexReader.open(location); } - else if (entry.getStatus() == TransactionStatus.MERGE_TARGET) + else { - mergeTargetId = entry.getName(); - outputLocation = location; - if (docCount < maxDocsForInMemoryMerge) - { - ramDirectory = new RAMDirectory(); - writer = new IndexWriter(ramDirectory, new AlfrescoStandardAnalyser(), true); - } - else - { - writer = new IndexWriter(location, new AlfrescoStandardAnalyser(), true); - - } - writer.setUseCompoundFile(mergerUseCompoundFile); - writer.setMaxBufferedDocs(mergerMinMergeDocs); - writer.setMergeFactor(mergerMergeFactor); - writer.setMaxMergeDocs(mergerMaxMergeDocs); - writer.setWriteLockTimeout(writeLockTimeout); + s_logger.error("Index is 
missing " + entry.getName()); + reader = IndexReader.open(emptyIndex); } + readers[count++] = reader; + docCount += entry.getDocumentCount(); } - writer.addIndexes(readers); - writer.close(); - - if (ramDirectory != null) + else if (entry.getStatus() == TransactionStatus.MERGE_TARGET) { - String[] files = ramDirectory.list(); - Directory directory = FSDirectory.getDirectory(outputLocation, true); - for (int i = 0; i < files.length; i++) + mergeTargetId = entry.getName(); + outputLocation = location; + if (docCount < maxDocsForInMemoryMerge) { - // make place on ram disk - IndexOutput os = directory.createOutput(files[i]); - // read current file - IndexInput is = ramDirectory.openInput(files[i]); - // and copy to ram disk - int len = (int) is.length(); - byte[] buf = new byte[len]; - is.readBytes(buf, 0, len); - os.writeBytes(buf, len); - // graceful cleanup - is.close(); - os.close(); + ramDirectory = new RAMDirectory(); + writer = new IndexWriter(ramDirectory, new AlfrescoStandardAnalyser(), true); } - ramDirectory.close(); - directory.close(); - } + else + { + writer = new IndexWriter(location, new AlfrescoStandardAnalyser(), true); - for (IndexReader reader : readers) - { - reader.close(); + } + writer.setUseCompoundFile(mergerUseCompoundFile); + writer.setMaxBufferedDocs(mergerMinMergeDocs); + writer.setMergeFactor(mergerMergeFactor); + writer.setMaxMergeDocs(mergerMaxMergeDocs); + writer.setWriteLockTimeout(writeLockTimeout); } } - } - catch (Throwable e) - { - s_logger.error("Failed to merge indexes", e); - fail = true; + writer.addIndexes(readers); + writer.close(); + + if (ramDirectory != null) + { + String[] files = ramDirectory.list(); + Directory directory = FSDirectory.getDirectory(outputLocation, true); + for (int i = 0; i < files.length; i++) + { + // make place on ram disk + IndexOutput os = directory.createOutput(files[i]); + // read current file + IndexInput is = ramDirectory.openInput(files[i]); + // and copy to ram disk + int len = (int) 
is.length(); + byte[] buf = new byte[len]; + is.readBytes(buf, 0, len); + os.writeBytes(buf, len); + // graceful cleanup + is.close(); + os.close(); + } + ramDirectory.close(); + directory.close(); + } + + for (IndexReader reader : readers) + { + reader.close(); + } } final String finalMergeTargetId = mergeTargetId; @@ -2989,15 +3403,7 @@ public class IndexInfo getReadLock(); try { - try - { - newReader = buildReferenceCountingIndexReader(mergeTargetId); - } - catch (IOException e) - { - s_logger.error("Failed to open reader for merge target", e); - fail = true; - } + newReader = buildReferenceCountingIndexReader(mergeTargetId); } finally { @@ -3005,7 +3411,7 @@ public class IndexInfo } final IndexReader finalNewReader = newReader; - final boolean wasMerged = !fail; + getWriteLock(); try { @@ -3018,41 +3424,21 @@ public class IndexInfo { if (entry.getStatus() == TransactionStatus.MERGE) { - if (wasMerged) + if (s_logger.isDebugEnabled()) { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("... deleting as merged " + entry.getName()); - } - toDelete.add(entry.getName()); - } - else - { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("... committing as merge failed " + entry.getName()); - } - entry.setStatus(TransactionStatus.COMMITTED); + s_logger.debug("... deleting as merged " + entry.getName()); } + toDelete.add(entry.getName()); } else if (entry.getStatus() == TransactionStatus.MERGE_TARGET) { - if (wasMerged) + + if (s_logger.isDebugEnabled()) { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("... committing merge target " + entry.getName()); - } - entry.setStatus(TransactionStatus.COMMITTED); - } - else - { - if (s_logger.isDebugEnabled()) - { - s_logger.debug("... deleting merge target as merge failed " + entry.getName()); - } - toDelete.add(entry.getName()); + s_logger.debug("... 
committing merge target " + entry.getName()); } + entry.setStatus(TransactionStatus.COMMITTED); + } } for (String id : toDelete) @@ -3073,6 +3459,10 @@ public class IndexInfo return null; } + public boolean canRetry() + { + return false; + } }); } finally @@ -3350,11 +3740,6 @@ public class IndexInfo interface Schedulable { void schedule(); - - public void done(); - - public void reschedule(); - } } diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java index 1a4973277a..2b8efd573c 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java @@ -36,5 +36,7 @@ public interface ReferenceCounting public void setInvalidForReuse() throws IOException; + public boolean isInvalidForReuse(); + public String getId(); } \ No newline at end of file diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java index c8e04d3428..744f715019 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java @@ -202,5 +202,10 @@ public class ReferenceCountingReadOnlyIndexReaderFactory { return id; } + + public boolean isInvalidForReuse() + { + return invalidForReuse; + } } } diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/TransactionStatus.java b/source/java/org/alfresco/repo/search/impl/lucene/index/TransactionStatus.java index ca4f59ad99..ea5a7e89d0 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/index/TransactionStatus.java +++ 
b/source/java/org/alfresco/repo/search/impl/lucene/index/TransactionStatus.java @@ -46,7 +46,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous == null; + return (previous == null) || (previous == ACTIVE); } public int getStatus() @@ -77,7 +77,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous.allowsRollbackOrMark(previous); + return previous.allowsRollbackOrMark(previous) || (previous == MARKED_ROLLBACK); } public int getStatus() @@ -108,7 +108,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous == TransactionStatus.PREPARING; + return (previous == TransactionStatus.PREPARING) || (previous == PREPARED); } public int getStatus() @@ -139,7 +139,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous == TransactionStatus.COMMITTING; + return (previous == TransactionStatus.COMMITTING) || (previous == COMMITTED); } public int getStatus() @@ -170,7 +170,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous == TransactionStatus.ROLLINGBACK; + return (previous == TransactionStatus.ROLLINGBACK) || (previous == ROLLEDBACK); } public int getStatus() @@ -201,7 +201,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return false; + return (previous == UNKNOWN); } public int getStatus() @@ -232,7 +232,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return false; + return (previous == NO_TRANSACTION); } public int getStatus() @@ -263,7 +263,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous == TransactionStatus.ACTIVE; + return (previous == TransactionStatus.ACTIVE) || (previous == PREPARING); } public int getStatus() @@ -294,7 +294,7 @@ public enum TransactionStatus public boolean 
follows(TransactionStatus previous) { - return previous == TransactionStatus.PREPARED; + return (previous == TransactionStatus.PREPARED) || (previous == COMMITTING); } public int getStatus() @@ -325,7 +325,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return previous.allowsRollbackOrMark(previous); + return previous.allowsRollbackOrMark(previous) || (previous == ROLLINGBACK); } public int getStatus() @@ -356,7 +356,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return false; + return (previous == MERGE); } public int getStatus() @@ -387,7 +387,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return false; + return (previous == MERGE_TARGET); } public int getStatus() @@ -419,7 +419,7 @@ public enum TransactionStatus public boolean follows(TransactionStatus previous) { - return false; + return (previous == COMMITTED_DELETING); } public int getStatus() diff --git a/source/java/org/alfresco/repo/security/authentication/AbstractAuthenticationComponent.java b/source/java/org/alfresco/repo/security/authentication/AbstractAuthenticationComponent.java index 874fa41052..dcc6e64848 100644 --- a/source/java/org/alfresco/repo/security/authentication/AbstractAuthenticationComponent.java +++ b/source/java/org/alfresco/repo/security/authentication/AbstractAuthenticationComponent.java @@ -144,6 +144,18 @@ public abstract class AbstractAuthenticationComponent implements AuthenticationC throw new UnsupportedOperationException(); } + public Authentication setCurrentUser(String userName, UserNameValidationMode validationMode) + { + switch(validationMode) + { + case NONE: + return setCurrentUserImpl(userName); + case CHECK_AND_FIX: + default: + return setCurrentUser(userName); + } + } + public Authentication setCurrentUser(final String userName) throws AuthenticationException { if (isSystemUserName(userName)) diff --git 
a/source/java/org/alfresco/repo/security/authentication/AuthenticationComponent.java b/source/java/org/alfresco/repo/security/authentication/AuthenticationComponent.java index ebb9205f36..1a7b678f35 100644 --- a/source/java/org/alfresco/repo/security/authentication/AuthenticationComponent.java +++ b/source/java/org/alfresco/repo/security/authentication/AuthenticationComponent.java @@ -29,6 +29,11 @@ import net.sf.acegisecurity.Authentication; public interface AuthenticationComponent { + public enum UserNameValidationMode + { + NONE, CHECK_AND_FIX; + } + /** * Authenticate * @@ -51,6 +56,13 @@ public interface AuthenticationComponent public Authentication setCurrentUser(String userName); + /** + * Explicitly set the current user to be authenticated. + * Specify if the userName is to be checked and fixed + */ + + public Authentication setCurrentUser(String userName, UserNameValidationMode validationMode); + /** * Remove the current security information * diff --git a/source/java/org/alfresco/repo/security/authentication/AuthenticationServiceImpl.java b/source/java/org/alfresco/repo/security/authentication/AuthenticationServiceImpl.java index 640b7e7154..10ce831e89 100644 --- a/source/java/org/alfresco/repo/security/authentication/AuthenticationServiceImpl.java +++ b/source/java/org/alfresco/repo/security/authentication/AuthenticationServiceImpl.java @@ -27,6 +27,7 @@ package org.alfresco.repo.security.authentication; import java.util.Collections; import java.util.Set; +import org.alfresco.repo.security.authentication.AuthenticationComponent.UserNameValidationMode; import org.alfresco.service.cmr.security.PermissionService; public class AuthenticationServiceImpl extends AbstractAuthenticationService @@ -165,10 +166,10 @@ public class AuthenticationServiceImpl extends AbstractAuthenticationService { try { + // clear context - to avoid MT concurrency issue (causing domain mismatch) - see also 'authenticate' above clearCurrentSecurityContext(); - 
authenticationComponent.setCurrentUser(ticketComponent.getAuthorityForTicket(ticket)); - ticketComponent.validateTicket(ticket); + authenticationComponent.setCurrentUser(ticketComponent.validateTicket(ticket), UserNameValidationMode.NONE); } catch(AuthenticationException ae) { diff --git a/source/java/org/alfresco/repo/security/authentication/AuthenticationUtil.java b/source/java/org/alfresco/repo/security/authentication/AuthenticationUtil.java index 314ee9258b..b46b57445a 100644 --- a/source/java/org/alfresco/repo/security/authentication/AuthenticationUtil.java +++ b/source/java/org/alfresco/repo/security/authentication/AuthenticationUtil.java @@ -529,6 +529,12 @@ public abstract class AuthenticationUtil return PermissionService.GUEST_AUTHORITY.toLowerCase(); } + /* package */ static void clearCurrentSecurityContextOnly() + { + ContextHolder.setContext(null); + NDC.remove(); + } + /** * Remove the current security information */ @@ -596,7 +602,7 @@ public abstract class AuthenticationUtil { if (realUser == null) { - AuthenticationUtil.clearCurrentSecurityContext(); + AuthenticationUtil.clearCurrentSecurityContextOnly(); } else { diff --git a/source/java/org/alfresco/repo/security/authentication/ChainingAuthenticationComponentImpl.java b/source/java/org/alfresco/repo/security/authentication/ChainingAuthenticationComponentImpl.java index 3a06bf4e4b..9355b9130f 100644 --- a/source/java/org/alfresco/repo/security/authentication/ChainingAuthenticationComponentImpl.java +++ b/source/java/org/alfresco/repo/security/authentication/ChainingAuthenticationComponentImpl.java @@ -29,6 +29,7 @@ import java.util.List; import net.sf.acegisecurity.Authentication; +import org.alfresco.repo.security.authentication.AuthenticationComponent.UserNameValidationMode; import org.alfresco.service.cmr.security.PermissionService; /** @@ -343,6 +344,24 @@ public class ChainingAuthenticationComponentImpl implements AuthenticationCompon return 
AuthenticationUtil.setCurrentAuthentication(authentication); } + + + public Authentication setCurrentUser(String userName, UserNameValidationMode validationMode) + { + for (AuthenticationComponent authComponent : getUsableAuthenticationComponents()) + { + try + { + return authComponent.setCurrentUser(userName, validationMode); + } + catch (AuthenticationException e) + { + // Ignore and chain + } + } + throw new AuthenticationException("Failed to set current user " + userName); + } + /** * Set the current user - try all implementations - as some may check the user exists */ diff --git a/source/java/org/alfresco/repo/security/permissions/AccessControlEntry.java b/source/java/org/alfresco/repo/security/permissions/AccessControlEntry.java index 250bcafc20..70692b85ca 100644 --- a/source/java/org/alfresco/repo/security/permissions/AccessControlEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/AccessControlEntry.java @@ -27,6 +27,7 @@ package org.alfresco.repo.security.permissions; import java.io.Serializable; import org.alfresco.service.cmr.security.AccessStatus; +import org.alfresco.service.cmr.security.AuthorityType; public interface AccessControlEntry extends Comparable, Serializable { @@ -35,6 +36,8 @@ public interface AccessControlEntry extends Comparable, Seri public PermissionReference getPermission(); public String getAuthority(); + + public AuthorityType getAuthorityType(); public AccessStatus getAccessStatus(); diff --git a/source/java/org/alfresco/repo/security/permissions/AccessControlList.java b/source/java/org/alfresco/repo/security/permissions/AccessControlList.java index b75fb3a117..eb7e5fdfac 100644 --- a/source/java/org/alfresco/repo/security/permissions/AccessControlList.java +++ b/source/java/org/alfresco/repo/security/permissions/AccessControlList.java @@ -27,6 +27,8 @@ package org.alfresco.repo.security.permissions; import java.io.Serializable; import java.util.List; +import 
org.alfresco.repo.security.permissions.impl.SimpleNodePermissionEntry; + public interface AccessControlList extends Serializable { /** @@ -48,4 +50,8 @@ public interface AccessControlList extends Serializable * @return */ public List getEntries(); + + public SimpleNodePermissionEntry getCachedSimpleNodePermissionEntry(); + + public void setCachedSimpleNodePermissionEntry(SimpleNodePermissionEntry cachedSimpleNodePermissionEntry); } diff --git a/source/java/org/alfresco/repo/security/permissions/NodePermissionEntry.java b/source/java/org/alfresco/repo/security/permissions/NodePermissionEntry.java index b8f0675bca..2ef1ec46bc 100644 --- a/source/java/org/alfresco/repo/security/permissions/NodePermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/NodePermissionEntry.java @@ -24,6 +24,8 @@ */ package org.alfresco.repo.security.permissions; +import java.util.Collection; +import java.util.List; import java.util.Set; import org.alfresco.service.cmr.repository.NodeRef; @@ -55,5 +57,5 @@ public interface NodePermissionEntry * * @return */ - public Set extends PermissionEntry> getPermissionEntries(); + public List extends PermissionEntry> getPermissionEntries(); } diff --git a/source/java/org/alfresco/repo/security/permissions/PermissionEntry.java b/source/java/org/alfresco/repo/security/permissions/PermissionEntry.java index 21365dc743..6cd1a0dfd6 100644 --- a/source/java/org/alfresco/repo/security/permissions/PermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/PermissionEntry.java @@ -81,4 +81,16 @@ public interface PermissionEntry * @return */ public AccessStatus getAccessStatus(); + + /** + * Is this permission inherited? 
+ * @return + */ + public boolean isInherited(); + + /** + * Return the position in the inhertance chain (0 is not inherited and set on the object) + * @return + */ + public int getPosition(); } diff --git a/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlEntry.java b/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlEntry.java index 79fd6e11ec..0979d7fdac 100644 --- a/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlEntry.java @@ -46,6 +46,8 @@ public class SimpleAccessControlEntry implements AccessControlEntry private String authority; + private AuthorityType authorityType; + private AccessControlEntryContext context; private PermissionReference permission; @@ -66,6 +68,11 @@ public class SimpleAccessControlEntry implements AccessControlEntry { return authority; } + + public AuthorityType getAuthorityType() + { + return authorityType; + } public AccessControlEntryContext getContext() { @@ -108,6 +115,7 @@ public class SimpleAccessControlEntry implements AccessControlEntry public void setAuthority(String authority) { this.authority = authority; + this.authorityType = AuthorityType.getAuthorityType(authority); } /** @@ -145,7 +153,7 @@ public class SimpleAccessControlEntry implements AccessControlEntry diff = (this.getAccessStatus()== AccessStatus.DENIED ? 0 : 1) - (other.getAccessStatus()== AccessStatus.DENIED ? 
0 : 1); if(diff == 0) { - return AuthorityType.getAuthorityType(this.getAuthority()).getOrderPosition() - AuthorityType.getAuthorityType(other.getAuthority()).getOrderPosition(); + return getAuthorityType().getOrderPosition() - other.getAuthorityType().getOrderPosition(); } else { diff --git a/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlList.java b/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlList.java index 83f571d3ab..ebba63633b 100644 --- a/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlList.java +++ b/source/java/org/alfresco/repo/security/permissions/SimpleAccessControlList.java @@ -3,6 +3,8 @@ package org.alfresco.repo.security.permissions; import java.util.ArrayList; import java.util.List; +import org.alfresco.repo.security.permissions.impl.SimpleNodePermissionEntry; + public class SimpleAccessControlList implements AccessControlList { /** @@ -14,6 +16,8 @@ public class SimpleAccessControlList implements AccessControlList private List entries = new ArrayList(); + private transient SimpleNodePermissionEntry cachedSimpleNodePermissionEntry; + public List getEntries() { return entries; @@ -33,6 +37,16 @@ public class SimpleAccessControlList implements AccessControlList { this.properties = properties; } + + public synchronized SimpleNodePermissionEntry getCachedSimpleNodePermissionEntry() + { + return cachedSimpleNodePermissionEntry; + } + + public synchronized void setCachedSimpleNodePermissionEntry(SimpleNodePermissionEntry cachedSimpleNodePermissionEntry) + { + this.cachedSimpleNodePermissionEntry = cachedSimpleNodePermissionEntry; + } diff --git a/source/java/org/alfresco/repo/security/permissions/impl/PermissionServiceTest.java b/source/java/org/alfresco/repo/security/permissions/impl/PermissionServiceTest.java index 415bed0588..088d950178 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/PermissionServiceTest.java +++ 
b/source/java/org/alfresco/repo/security/permissions/impl/PermissionServiceTest.java @@ -25,6 +25,7 @@ package org.alfresco.repo.security.permissions.impl; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -344,7 +345,7 @@ public class PermissionServiceTest extends AbstractPermissionTest public void testSetNodePermissionEntry() { runAs("andy"); - Set entries = new HashSet(); + ArrayList entries = new ArrayList(); entries.add(new SimplePermissionEntry(rootNodeRef, new SimplePermissionReference(QName.createQName("A", "B"), "C"), "user-one", AccessStatus.ALLOWED)); entries.add(new SimplePermissionEntry(rootNodeRef, permissionService.getAllPermissionReference(), "user-two", AccessStatus.ALLOWED)); entries.add(new SimplePermissionEntry(rootNodeRef, new SimplePermissionReference(QName.createQName("D", "E"), "F"), permissionService.getAllAuthorities(), @@ -363,7 +364,7 @@ public class PermissionServiceTest extends AbstractPermissionTest public void testSetNodePermissionEntry2() { - Set entries = new HashSet(); + ArrayList entries = new ArrayList(); entries.add(new SimplePermissionEntry(rootNodeRef, permissionService.getAllPermissionReference(), permissionService.getAllAuthorities(), AccessStatus.ALLOWED)); SimpleNodePermissionEntry entry = new SimpleNodePermissionEntry(rootNodeRef, false, entries); @@ -386,7 +387,7 @@ public class PermissionServiceTest extends AbstractPermissionTest public void testDoubleSetAllowDeny() { - Set extends PermissionEntry> permissionEntries = null; + List extends PermissionEntry> permissionEntries = null; // add-remove andy-all permissionService.setPermission(rootNodeRef, "andy", permissionService.getAllPermission(), true); permissionService.setPermission(rootNodeRef, "andy", permissionService.getAllPermission(), false); diff --git a/source/java/org/alfresco/repo/security/permissions/impl/SimpleNodePermissionEntry.java 
b/source/java/org/alfresco/repo/security/permissions/impl/SimpleNodePermissionEntry.java index bae1b0bd3e..7ae0a48595 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/SimpleNodePermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/SimpleNodePermissionEntry.java @@ -25,6 +25,8 @@ package org.alfresco.repo.security.permissions.impl; import java.io.Serializable; +import java.util.Collection; +import java.util.List; import java.util.Set; import org.alfresco.repo.security.permissions.PermissionEntry; @@ -55,10 +57,10 @@ public final class SimpleNodePermissionEntry extends AbstractNodePermissionEntry /* * The set of permission entries. */ - private Set extends PermissionEntry> permissionEntries; + private List extends PermissionEntry> permissionEntries; - public SimpleNodePermissionEntry(NodeRef nodeRef, boolean inheritPermissions, Set extends PermissionEntry> permissionEntries) + public SimpleNodePermissionEntry(NodeRef nodeRef, boolean inheritPermissions, List extends PermissionEntry> permissionEntries) { super(); this.nodeRef = nodeRef; @@ -76,7 +78,7 @@ public final class SimpleNodePermissionEntry extends AbstractNodePermissionEntry return inheritPermissions; } - public Set extends PermissionEntry> getPermissionEntries() + public List extends PermissionEntry> getPermissionEntries() { return permissionEntries; } diff --git a/source/java/org/alfresco/repo/security/permissions/impl/SimplePermissionEntry.java b/source/java/org/alfresco/repo/security/permissions/impl/SimplePermissionEntry.java index 836eeb6bf7..2bd9dbe93f 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/SimplePermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/SimplePermissionEntry.java @@ -56,9 +56,14 @@ public final class SimplePermissionEntry extends AbstractPermissionEntry */ private AccessStatus accessStatus; - + private int position; public SimplePermissionEntry(NodeRef nodeRef, PermissionReference 
permissionReference, String authority, AccessStatus accessStatus) + { + this(nodeRef, permissionReference, authority, accessStatus, 0); + } + + public SimplePermissionEntry(NodeRef nodeRef, PermissionReference permissionReference, String authority, AccessStatus accessStatus, int position) { super(); this.nodeRef = nodeRef; @@ -97,4 +102,14 @@ public final class SimplePermissionEntry extends AbstractPermissionEntry return accessStatus; } + public int getPosition() + { + return position; + } + + public boolean isInherited() + { + return position > 0; + } + } diff --git a/source/java/org/alfresco/repo/security/permissions/impl/model/GlobalPermissionEntry.java b/source/java/org/alfresco/repo/security/permissions/impl/model/GlobalPermissionEntry.java index 4d337e67ac..d57cbf6121 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/model/GlobalPermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/model/GlobalPermissionEntry.java @@ -93,4 +93,14 @@ public class GlobalPermissionEntry implements XMLModelInitialisable, PermissionE return AccessStatus.ALLOWED; } + public int getPosition() + { + return 0; + } + + public boolean isInherited() + { + return false; + } + } diff --git a/source/java/org/alfresco/repo/security/permissions/impl/model/ModelPermissionEntry.java b/source/java/org/alfresco/repo/security/permissions/impl/model/ModelPermissionEntry.java index 308d218472..bd426eefc1 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/model/ModelPermissionEntry.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/model/ModelPermissionEntry.java @@ -155,4 +155,14 @@ public class ModelPermissionEntry implements PermissionEntry, XMLModelInitialisa String name = permissionReferenceElement.attributeValue(NAME); permissionReference = PermissionReferenceImpl.getPermissionReference(typeQName, name); } + + public int getPosition() + { + return 0; + } + + public boolean isInherited() + { + return false; + } } 
diff --git a/source/java/org/alfresco/repo/security/permissions/impl/model/NodePermission.java b/source/java/org/alfresco/repo/security/permissions/impl/model/NodePermission.java index deb40e0359..04e43ee33e 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/model/NodePermission.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/model/NodePermission.java @@ -24,9 +24,11 @@ */ package org.alfresco.repo.security.permissions.impl.model; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; +import java.util.List; import java.util.Set; import org.alfresco.repo.security.permissions.NodePermissionEntry; @@ -55,7 +57,7 @@ public class NodePermission implements NodePermissionEntry, XMLModelInitialisabl // If null then it is the root. private NodeRef nodeRef; - private Set permissionEntries = new HashSet(); + private List permissionEntries = new ArrayList(); private boolean inheritPermissionsFromParent; @@ -74,9 +76,9 @@ public class NodePermission implements NodePermissionEntry, XMLModelInitialisabl return inheritPermissionsFromParent; } - public Set getPermissionEntries() + public List getPermissionEntries() { - return Collections.unmodifiableSet(permissionEntries); + return Collections.unmodifiableList(permissionEntries); } public void initialise(Element element, NamespacePrefixResolver nspr, PermissionModel permissionModel) diff --git a/source/java/org/alfresco/repo/security/permissions/noop/PermissionServiceNOOPImpl.java b/source/java/org/alfresco/repo/security/permissions/noop/PermissionServiceNOOPImpl.java index f954369032..ec0fb6d0a4 100644 --- a/source/java/org/alfresco/repo/security/permissions/noop/PermissionServiceNOOPImpl.java +++ b/source/java/org/alfresco/repo/security/permissions/noop/PermissionServiceNOOPImpl.java @@ -203,7 +203,7 @@ public class PermissionServiceNOOPImpl public NodePermissionEntry getSetPermissions(NodeRef nodeRef) { - return new 
SimpleNodePermissionEntry(nodeRef, true, Collections.emptySet()); + return new SimpleNodePermissionEntry(nodeRef, true, Collections.emptyList()); } public Set getSettablePermissionReferences(NodeRef nodeRef) @@ -287,7 +287,7 @@ public class PermissionServiceNOOPImpl public NodePermissionEntry getSetPermissions(StoreRef storeRef) { - return new SimpleNodePermissionEntry(null, true, Collections.emptySet()); + return new SimpleNodePermissionEntry(null, true, Collections.emptyList()); } diff --git a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java index d66e1367dc..e2cc9f55f0 100644 --- a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java +++ b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java @@ -90,7 +90,7 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, /** * A set of permissions to set for the owner when a home folder is created */ - private Set ownerPemissionsToSetOnCreate; + private Set ownerPermissionsToSetOnCreate; /** * General permissions to set on the node Map<(String)uid, Set<(String)permission>>. @@ -100,7 +100,7 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, /** * Permissions to set for the user - on create and reference. */ - private Set userPemissions; + private Set userPermissions; /** * Clear existing permissions on new home folders (useful of created from a template. @@ -251,11 +251,11 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, /** * The owner permissions to set on create. 
* - * @param ownerPemissionsToSetOnCreate + * @param ownerPermissionsToSetOnCreate */ - public void setOwnerPemissionsToSetOnCreate(Set ownerPemissionsToSetOnCreate) + public void setOwnerPermissionsToSetOnCreate(Set ownerPermissionsToSetOnCreate) { - this.ownerPemissionsToSetOnCreate = ownerPemissionsToSetOnCreate; + this.ownerPermissionsToSetOnCreate = ownerPermissionsToSetOnCreate; } /** @@ -271,11 +271,11 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, /** * User permissions to set on create and on reference. * - * @param userPemissions + * @param userPermissions */ - public void setUserPemissions(Set userPemissions) + public void setUserPermissions(Set userPermissions) { - this.userPemissions = userPemissions; + this.userPermissions = userPermissions; } /** @@ -400,9 +400,9 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, // Set owner permissions - if (ownerPemissionsToSetOnCreate != null) + if (ownerPermissionsToSetOnCreate != null) { - for (String permission : ownerPemissionsToSetOnCreate) + for (String permission : ownerPermissionsToSetOnCreate) { serviceRegistry.getPermissionService().setPermission(homeFolder.getNodeRef(), PermissionService.OWNER_AUTHORITY, permission, true); @@ -430,9 +430,9 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider, // Add user permissions on create and reference - if (userPemissions != null) + if (userPermissions != null) { - for (String permission : userPemissions) + for (String permission : userPermissions) { serviceRegistry.getPermissionService().setPermission(homeFolder.getNodeRef(), uid, permission, true); diff --git a/source/java/org/alfresco/repo/security/person/PersonTest.java b/source/java/org/alfresco/repo/security/person/PersonTest.java index 426b0ef3af..447143ed7a 100644 --- a/source/java/org/alfresco/repo/security/person/PersonTest.java +++ b/source/java/org/alfresco/repo/security/person/PersonTest.java @@ -110,6 
+110,39 @@ public class PersonTest extends BaseSpringTest } } + + public void testCreateAndThenDelete() + { + personService.setCreateMissingPeople(false); + assertFalse(personService.createMissingPeople()); + + personService.setCreateMissingPeople(true); + assertTrue(personService.createMissingPeople()); + + personService.setCreateMissingPeople(false); + try + { + personService.getPerson("andy"); + fail("Getting Andy should fail"); + } + catch (PersonException pe) + { + + } + personService.createPerson(createDefaultProperties("andy", "Andy", "Hind", "andy@hind", "alfresco", rootNodeRef)); + personService.getPerson("andy"); + personService.deletePerson("andy"); + try + { + personService.getPerson("andy"); + fail("Getting Andy should fail"); + } + catch (PersonException pe) + { + + } + } + public void testCreateMissingPeople1() { personService.setCreateMissingPeople(false); diff --git a/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java b/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java index c5d138cc6c..eaa357f2c0 100644 --- a/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java +++ b/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java @@ -24,6 +24,7 @@ */ package org.alfresco.repo.transaction; +import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -33,6 +34,7 @@ import java.util.Map; import java.util.Set; import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.repo.cache.TransactionalCache; import org.alfresco.repo.node.integrity.IntegrityChecker; import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher; import org.alfresco.util.GUID; @@ -319,7 +321,7 @@ public abstract class AlfrescoTransactionSupport TransactionSynchronizationImpl synch = getSynchronization(); // bind the service in - boolean bound = synch.getListeners().add(listener); + boolean bound = synch.addListener(listener); // 
done if (logger.isDebugEnabled()) @@ -387,7 +389,8 @@ public abstract class AlfrescoTransactionSupport */ if (!TransactionSynchronizationManager.isSynchronizationActive()) { - throw new AlfrescoRuntimeException("Transaction must be active and synchronization is required"); + Thread currentThread = Thread.currentThread(); + throw new AlfrescoRuntimeException("Transaction must be active and synchronization is required: " + currentThread); } TransactionSynchronizationImpl txnSynch = (TransactionSynchronizationImpl) TransactionSynchronizationManager.getResource(RESOURCE_KEY_TXN_SYNCH); @@ -453,6 +456,8 @@ public abstract class AlfrescoTransactionSupport private final Set integrityCheckers; private final Set lucenes; private final LinkedHashSet listeners; + private final Set> transactionalCaches; +// private final Set jgroupsEhCacheListeners; private final Map resources; /** @@ -468,6 +473,8 @@ public abstract class AlfrescoTransactionSupport integrityCheckers = new HashSet(3); lucenes = new HashSet(3); listeners = new LinkedHashSet(5); + transactionalCaches = new HashSet>(3); +// jgroupsEhCacheListeners = new HashSet(3); resources = new HashMap(17); } @@ -512,9 +519,21 @@ public abstract class AlfrescoTransactionSupport * @return Returns a set of TransactionListener instances that will be called * during end-of-transaction processing */ - public Set getListeners() + @SuppressWarnings("unchecked") + public boolean addListener(TransactionListener listener) { - return listeners; + if (listener instanceof TransactionalCache) + { + return transactionalCaches.add((TransactionalCache)listener); + } +// else if (listener instanceof JGroupsEhCacheListener) +// { +// return jgroupsEhCacheListeners.add((JGroupsEhCacheListener)listener); +// } + else + { + return listeners.add(listener); + } } /** @@ -608,6 +627,18 @@ public abstract class AlfrescoTransactionSupport { dao.beforeCommit(); } + + // Flush the transactional caches + for (TransactionalCache cache : transactionalCaches) 
+ { + cache.beforeCommit(readOnly); + } +// +// // Flush the JGroups listeners +// for (JGroupsEhCacheListener listener : jgroupsEhCacheListeners) +// { +// listener.beforeCommit(readOnly); +// } } /** @@ -621,7 +652,9 @@ public abstract class AlfrescoTransactionSupport } /** - * Executes the beforeCommit event handlers for the outstanding listeners + * Executes the beforeCommit event handlers for the outstanding listeners. + * This process is iterative as the process of calling listeners may lead to more listeners + * being added. The new listeners will be processed until there no listeners remaining. * * @param visitedListeners a set containing the already visited listeners * @param readOnly is read only diff --git a/source/java/org/alfresco/repo/transaction/RetryingTransactionHelper.java b/source/java/org/alfresco/repo/transaction/RetryingTransactionHelper.java index 4f5e15efb2..e5f9fce950 100644 --- a/source/java/org/alfresco/repo/transaction/RetryingTransactionHelper.java +++ b/source/java/org/alfresco/repo/transaction/RetryingTransactionHelper.java @@ -25,13 +25,15 @@ package org.alfresco.repo.transaction; import java.sql.BatchUpdateException; +import java.sql.SQLException; import java.util.Random; import javax.transaction.RollbackException; import javax.transaction.Status; -import javax.transaction.SystemException; import javax.transaction.UserTransaction; +import net.sf.ehcache.distribution.RemoteCacheException; + import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.error.ExceptionStackUtil; import org.alfresco.repo.security.permissions.AccessDeniedException; @@ -63,6 +65,7 @@ public class RetryingTransactionHelper /** * Exceptions that trigger retries. 
*/ + @SuppressWarnings("unchecked") public static final Class[] RETRY_EXCEPTIONS; static { @@ -71,11 +74,14 @@ public class RetryingTransactionHelper DeadlockLoserDataAccessException.class, StaleObjectStateException.class, LockAcquisitionException.class, - BatchUpdateException.class, ConstraintViolationException.class, + UncategorizedSQLException.class, + SQLException.class, + BatchUpdateException.class, DataIntegrityViolationException.class, StaleStateException.class, - ObjectNotFoundException.class + ObjectNotFoundException.class, + RemoteCacheException.class }; } @@ -304,20 +310,10 @@ public class RetryingTransactionHelper txn.rollback(); } } - catch (IllegalStateException e1) + catch (Throwable e1) { - logger.error(e); - throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1); - } - catch (SecurityException e1) - { - logger.error(e); - throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1); - } - catch (SystemException e1) - { - logger.error(e); - throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1); + // A rollback failure should not preclude a retry, but logging of the rollback failure is required + logger.error("Rollback failure. 
Normal retry behaviour will resume.", e1); } } if (e instanceof RollbackException) @@ -365,6 +361,7 @@ public class RetryingTransactionHelper * @param cause the cause to examine * @return Returns the original cause if it is a valid retry cause, otherwise null */ + @SuppressWarnings("unchecked") public static Throwable extractRetryCause(Throwable cause) { Throwable retryCause = ExceptionStackUtil.getCause(cause, RETRY_EXCEPTIONS); @@ -372,57 +369,9 @@ public class RetryingTransactionHelper { return null; } - else if (retryCause instanceof BatchUpdateException) - { - if (retryCause.getMessage().contains("Lock wait")) - { - // It is valid - return retryCause; - } - else - { - // Not valid - return null; - } - } - else if (retryCause instanceof DataIntegrityViolationException) - { - if (retryCause.getMessage().contains("ChildAssocImpl")) - { - // It is probably the duplicate name violation - return retryCause; - } - else - { - // Something else - return null; - } - } - else if (retryCause instanceof UncategorizedSQLException) - { - // Handle error that slips out of MSSQL - if (retryCause.getMessage().contains("deadlock")) - { - // It is valid - return retryCause; - } - else - { - // Not valid - return null; - } - } - else if (retryCause instanceof ObjectNotFoundException) - { - // This is (I'm almost certain) an optimistic locking failure in disguise. 
- if (retryCause.getMessage().contains("No row")) - { - return retryCause; - } - return null; - } else { + // A simple match return retryCause; } } diff --git a/source/java/org/alfresco/repo/transaction/RetryingTransactionHelperTest.java b/source/java/org/alfresco/repo/transaction/RetryingTransactionHelperTest.java index ac862f81d1..417bb79dae 100644 --- a/source/java/org/alfresco/repo/transaction/RetryingTransactionHelperTest.java +++ b/source/java/org/alfresco/repo/transaction/RetryingTransactionHelperTest.java @@ -39,8 +39,10 @@ import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.ApplicationContextHelper; +import org.hibernate.SessionFactory; import org.springframework.context.ApplicationContext; import org.springframework.dao.ConcurrencyFailureException; +import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /** * Tests the transaction retrying behaviour with various failure modes. @@ -309,6 +311,7 @@ public class RetryingTransactionHelperTest extends TestCase * Checks nesting of two transactions with requiresNew == true, * but where the two transactions get involved in a concurrency struggle. 
*/ + @SuppressWarnings("unchecked") public void testNestedWithoutPropogationConcurrentUntilFailure() { RetryingTransactionCallback callback = new RetryingTransactionCallback() @@ -379,4 +382,41 @@ public class RetryingTransactionHelperTest extends TestCase long checkValue = txnHelper.doInTransaction(callback); assertEquals("Check value not incremented", 11, checkValue); } + + public void testLostConnectionRecovery() + { + RetryingTransactionCallback killConnectionCallback = new RetryingTransactionCallback() + { + private boolean killed = false; + public Object execute() throws Throwable + { + // Do some work + nodeService.deleteNode(workingNodeRef); + // Successful upon retry + if (killed) + { + return null; + } + // Kill the connection the first time + HibernateConnectionKiller killer = new HibernateConnectionKiller(); + killer.setSessionFactory((SessionFactory)ctx.getBean("sessionFactory")); + killer.killConnection(); + killed = true; + return null; + } + }; + // This should work + txnHelper.doInTransaction(killConnectionCallback); + } + + /** + * Helper class to kill the session's DB connection + */ + private class HibernateConnectionKiller extends HibernateDaoSupport + { + private void killConnection() throws Exception + { + getSession().connection().rollback(); + } + } } diff --git a/source/java/org/alfresco/repo/transaction/TransactionListener.java b/source/java/org/alfresco/repo/transaction/TransactionListener.java index 11741fbfb6..8fb2633634 100644 --- a/source/java/org/alfresco/repo/transaction/TransactionListener.java +++ b/source/java/org/alfresco/repo/transaction/TransactionListener.java @@ -52,7 +52,8 @@ public interface TransactionListener * {@link #beforeCommit(boolean) } even if {@link #beforeCommit(boolean)} * failed. * - * Any exceptions generated here will cause the transaction to rollback. + * Any exceptions generated here will only be logged and will have no effect + * on the state of the transaction. 
* * All transaction resources are still available. */ diff --git a/source/java/org/alfresco/service/cmr/repository/MLText.java b/source/java/org/alfresco/service/cmr/repository/MLText.java index 2b7c6bee2c..3c1251c740 100644 --- a/source/java/org/alfresco/service/cmr/repository/MLText.java +++ b/source/java/org/alfresco/service/cmr/repository/MLText.java @@ -128,6 +128,12 @@ public class MLText extends HashMap */ public String getDefaultValue() { + // Shortcut so that we don't have to go and get the current locale + if (this.size() == 0) + { + return null; + } + // There is some hope of getting a match Locale locale = I18NUtil.getLocale(); return getClosestValue(locale); } diff --git a/source/java/org/alfresco/service/cmr/repository/datatype/Duration.java b/source/java/org/alfresco/service/cmr/repository/datatype/Duration.java index d9c0a603a1..206c9373a4 100644 --- a/source/java/org/alfresco/service/cmr/repository/datatype/Duration.java +++ b/source/java/org/alfresco/service/cmr/repository/datatype/Duration.java @@ -1004,6 +1004,19 @@ public class Duration implements Comparable, Serializable System.out.println(" D: " + d.divide(10)); System.out.println(" +: " + d.add(a2)); //System.out.println(" +: " + d.add(a1)); + + + d = new Duration("P1H"); + System.out.println("V: " + d); + System.out.println("F: " + d.formattedString()); + System.out.println(" D: " + d.divide(10)); + System.out.println(" +: " + d.add(a2)); + + d = new Duration("PT1H"); + System.out.println("V: " + d); + System.out.println("F: " + d.formattedString()); + System.out.println(" D: " + d.divide(10)); + System.out.println(" +: " + d.add(a2)); } }