Merged V3.2 to HEAD

16737: Fix for ETHREEOH-2903: MSSQL bootstrap failed : "n must be positive"
   16826: Missing merge metadata on root from previous check-in CHK-9689
   16847: Merged V3.1 to V3.2
      16835: Build fix after PDFBox lib change
   16915: (RECORD ONLY) Removed deep svn:mergeinfo
   16923: Lucene performance: avoid too many index deltas ...
   16933: Fix ETHREEOH-1788 - Bootstrap fails due to disabled-avm-indexing-context.xml
   16935: Merged V3.1 to V3.2
      15894: Fix ETHREEOH-2416 and ALFCOM-3300: Duplicate name exceptions not being trapped correctly   
   16936: Merged V3.1 to V3.2
      16672: Fixed ETHREEOH-2021: System error appears when trying to paste content
      16686: Fixed ETHREEOH-2129: HibernateNodeDaoService implementation does not cater for a null condition
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /alfresco/BRANCHES/V3.1:r15894,16672,16686,16811,16816,16835
   Merged /alfresco/BRANCHES/V3.2:r16737,16826,16847,16915,16923,16933,16935-16936


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@17015 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2009-10-19 10:32:00 +00:00
parent 5a0883f91c
commit b27e75d1aa
11 changed files with 424 additions and 245 deletions

View File

@@ -231,12 +231,23 @@ public class ConfigurationChecker extends AbstractLifecycleBean
{
if (storeRef.getProtocol().equals(StoreRef.PROTOCOL_AVM))
{
IndexMode storeIndexMode = avmSnapShotTriggeredIndexingMethodInterceptor.getIndexMode(storeRef.getIdentifier());
if (storeIndexMode.equals(IndexMode.UNINDEXED))
if (avmSnapShotTriggeredIndexingMethodInterceptor.isIndexingEnabled())
{
IndexMode storeIndexMode = avmSnapShotTriggeredIndexingMethodInterceptor.getIndexMode(storeRef.getIdentifier());
if (storeIndexMode.equals(IndexMode.UNINDEXED))
{
if (logger.isDebugEnabled())
{
logger.debug("Skipping index check for store: " + storeRef + " (unindexed AVM store)");
}
continue;
}
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Skipping index for store: " + storeRef + " (unindexed AVM store)");
logger.debug("Skipping index check for store: " + storeRef + " (AVM indexing is disabled)");
}
continue;
}

View File

@@ -64,6 +64,7 @@ import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.GUID;
import org.alfresco.util.SearchLanguageConversion;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -571,23 +572,18 @@ public class FileFolderServiceImpl implements FileFolderService
targetParentRef = assocRef.getParentRef();
}
boolean changedParent = !targetParentRef.equals(assocRef.getParentRef());
// there is nothing to do if both the name and parent folder haven't changed
if (targetParentRef.equals(assocRef.getParentRef()))
if (!nameChanged && !changedParent)
{
if (newName.equals(beforeFileInfo.getName()))
{
if (logger.isDebugEnabled())
{
logger.debug("Doing nothing - neither filename or parent has changed: \n" +
" parent: " + targetParentRef + "\n" +
" before: " + beforeFileInfo + "\n" +
" new name: " + newName);
}
return beforeFileInfo;
}
else if (newName.equalsIgnoreCase(beforeFileInfo.getName()))
if (logger.isDebugEnabled())
{
logger.debug("Doing nothing - neither filename or parent has changed: \n" +
" parent: " + targetParentRef + "\n" +
" before: " + beforeFileInfo + "\n" +
" new name: " + newName);
}
return beforeFileInfo;
}
QName existingQName = assocRef.getQName();
@@ -629,13 +625,29 @@ public class FileFolderServiceImpl implements FileFolderService
// TODO: Replace this with a more formal means of identifying "system" folders (i.e. aspect or UUID)
if (!isSystemPath(sourceNodeRef))
{
// move the node so that the association moves as well
ChildAssociationRef newAssocRef = nodeService.moveNode(
sourceNodeRef,
targetParentRef,
assocTypeQname,
qname);
targetNodeRef = newAssocRef.getChildRef();
// The cm:name might clash with another node in the target location.
if (nameChanged)
{
// The name will be changing, so we really need to set the node's name to the new
// name. This can't be done at the same time as the move - to avoid incorrect violations
// of the name constraints, the cm:name is set to something random and will be reset
// to the correct name later.
nodeService.setProperty(sourceNodeRef, ContentModel.PROP_NAME, GUID.generate());
}
try
{
// move the node so that the association moves as well
ChildAssociationRef newAssocRef = nodeService.moveNode(
sourceNodeRef,
targetParentRef,
assocTypeQname,
qname);
targetNodeRef = newAssocRef.getChildRef();
}
catch (DuplicateChildNodeNameException e)
{
throw new FileExistsException(targetParentRef, newName);
}
}
else
{
@@ -647,7 +659,7 @@ public class FileFolderServiceImpl implements FileFolderService
{
try
{
// copy the node
// Copy the node. The cm:name will be dropped and reset later.
targetNodeRef = copyService.copy(
sourceNodeRef,
targetParentRef,

View File

@@ -64,13 +64,12 @@ import org.springframework.context.ApplicationContext;
/**
* @see org.alfresco.repo.model.filefolder.FileFolderServiceImpl
*
* @author Derek Hulley
*/
public class FileFolderServiceImplTest extends TestCase
{
private static final String IMPORT_VIEW = "filefolder/filefolder-test-import.xml";
private static final String NAME_L0_FILE_A = "L0- File A";
private static final String NAME_L0_FILE_B = "L0- File B";
private static final String NAME_L0_FOLDER_A = "L0- Folder A";
@@ -83,7 +82,7 @@ public class FileFolderServiceImplTest extends TestCase
private static final String NAME_L1_FILE_C = "L1- File C (%_)";
private static final String NAME_CHECK_FILE = "CHECK_FILE";
private static final String NAME_CHECK_FOLDER = "CHECK_FOLDER";
private static final ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private TransactionService transactionService;
@@ -93,7 +92,7 @@ public class FileFolderServiceImplTest extends TestCase
private UserTransaction txn;
private NodeRef rootNodeRef;
private NodeRef workingRootNodeRef;
@Override
public void setUp() throws Exception
{
@@ -102,29 +101,29 @@ public class FileFolderServiceImplTest extends TestCase
nodeService = serviceRegistry.getNodeService();
fileFolderService = serviceRegistry.getFileFolderService();
dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
AuthenticationComponent authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponent");
AuthenticationComponent authenticationComponent = (AuthenticationComponent) ctx
.getBean("authenticationComponent");
// start the transaction
txn = transactionService.getUserTransaction();
txn.begin();
// downgrade integrity
IntegrityChecker.setWarnInTransaction();
// authenticate
authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName());
// create a test store
StoreRef storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, getName() + System.currentTimeMillis());
StoreRef storeRef = nodeService
.createStore(StoreRef.PROTOCOL_WORKSPACE, getName() + System.currentTimeMillis());
rootNodeRef = nodeService.getRootNode(storeRef);
// create a folder to import into
workingRootNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(NamespaceService.ALFRESCO_URI, "working root"),
ContentModel.TYPE_FOLDER).getChildRef();
workingRootNodeRef = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName(NamespaceService.ALFRESCO_URI, "working root"), ContentModel.TYPE_FOLDER)
.getChildRef();
// import the test data
ImporterService importerService = serviceRegistry.getImporterService();
Location importLocation = new Location(workingRootNodeRef);
@@ -136,7 +135,7 @@ public class FileFolderServiceImplTest extends TestCase
Reader reader = new InputStreamReader(is);
importerService.importView(reader, importLocation, null, null);
}
public void tearDown() throws Exception
{
try
@@ -157,7 +156,11 @@ public class FileFolderServiceImplTest extends TestCase
* @param expectedFolderCount the number of uniquely named folders expected
* @param expectedNames the names of the files and folders expected
*/
private void checkFileList(List<FileInfo> files, int expectedFileCount, int expectedFolderCount, String[] expectedNames)
private void checkFileList(
List<FileInfo> files,
int expectedFileCount,
int expectedFolderCount,
String[] expectedNames)
{
int fileCount = 0;
int folderCount = 0;
@@ -182,70 +185,61 @@ public class FileFolderServiceImplTest extends TestCase
assertEquals("Incorrect number of files", expectedFileCount, fileCount);
assertEquals("Incorrect number of folders", expectedFolderCount, folderCount);
}
public void testShallowFilesAndFoldersList() throws Exception
{
List<FileInfo> files = fileFolderService.list(workingRootNodeRef);
// check
String[] expectedNames = new String[] {NAME_L0_FILE_A, NAME_L0_FILE_B, NAME_L0_FOLDER_A, NAME_L0_FOLDER_B, NAME_L0_FOLDER_C};
String[] expectedNames = new String[]
{ NAME_L0_FILE_A, NAME_L0_FILE_B, NAME_L0_FOLDER_A, NAME_L0_FOLDER_B, NAME_L0_FOLDER_C };
checkFileList(files, 2, 3, expectedNames);
}
public void testShallowFilesOnlyList() throws Exception
{
List<FileInfo> files = fileFolderService.listFiles(workingRootNodeRef);
// check
String[] expectedNames = new String[] {NAME_L0_FILE_A, NAME_L0_FILE_B};
String[] expectedNames = new String[]
{ NAME_L0_FILE_A, NAME_L0_FILE_B };
checkFileList(files, 2, 0, expectedNames);
}
public void testShallowFoldersOnlyList() throws Exception
{
List<FileInfo> files = fileFolderService.listFolders(workingRootNodeRef);
// check
String[] expectedNames = new String[] {NAME_L0_FOLDER_A, NAME_L0_FOLDER_B, NAME_L0_FOLDER_C};
String[] expectedNames = new String[]
{ NAME_L0_FOLDER_A, NAME_L0_FOLDER_B, NAME_L0_FOLDER_C };
checkFileList(files, 0, 3, expectedNames);
}
public void testShallowFileSearch() throws Exception
{
List<FileInfo> files = fileFolderService.search(
workingRootNodeRef,
NAME_L0_FILE_B,
true,
false,
false);
List<FileInfo> files = fileFolderService.search(workingRootNodeRef, NAME_L0_FILE_B, true, false, false);
// check
String[] expectedNames = new String[] {NAME_L0_FILE_B};
String[] expectedNames = new String[]
{ NAME_L0_FILE_B };
checkFileList(files, 1, 0, expectedNames);
}
public void testDeepFilesAndFoldersSearch() throws Exception
{
List<FileInfo> files = fileFolderService.search(
workingRootNodeRef,
"?1-*",
true,
true,
true);
List<FileInfo> files = fileFolderService.search(workingRootNodeRef, "?1-*", true, true, true);
// check
String[] expectedNames = new String[] {NAME_L1_FOLDER_A, NAME_L1_FOLDER_B, NAME_L1_FILE_A, NAME_L1_FILE_B, NAME_L1_FILE_C};
String[] expectedNames = new String[]
{ NAME_L1_FOLDER_A, NAME_L1_FOLDER_B, NAME_L1_FILE_A, NAME_L1_FILE_B, NAME_L1_FILE_C };
checkFileList(files, 3, 2, expectedNames);
}
public void testDeepFilesOnlySearch() throws Exception
{
List<FileInfo> files = fileFolderService.search(
workingRootNodeRef,
"?1-*",
true,
false,
true);
List<FileInfo> files = fileFolderService.search(workingRootNodeRef, "?1-*", true, false, true);
// check
String[] expectedNames = new String[] {NAME_L1_FILE_A, NAME_L1_FILE_B, NAME_L1_FILE_C};
String[] expectedNames = new String[]
{ NAME_L1_FILE_A, NAME_L1_FILE_B, NAME_L1_FILE_C };
checkFileList(files, 3, 0, expectedNames);
}
/**
* Helper to fetch a file or folder by name
*
@@ -258,9 +252,8 @@ public class FileFolderServiceImplTest extends TestCase
List<FileInfo> results = fileFolderService.search(workingRootNodeRef, name, !isFolder, isFolder, true);
if (results.size() > 1)
{
throw new AlfrescoRuntimeException("Name is not unique in hierarchy: \n" +
" name: " + name + "\n" +
" is folder: " + isFolder);
throw new AlfrescoRuntimeException("Name is not unique in hierarchy: \n" + " name: " + name + "\n"
+ " is folder: " + isFolder);
}
else if (results.size() == 0)
{
@@ -287,7 +280,7 @@ public class FileFolderServiceImplTest extends TestCase
assertNotNull(fileInfo);
assertFalse(fileInfo.isFolder());
}
public void testRenameNormal() throws Exception
{
FileInfo folderInfo = getByName(NAME_L0_FOLDER_A, true);
@@ -301,7 +294,7 @@ public class FileFolderServiceImplTest extends TestCase
checkInfo = getByName(newName, true);
assertNotNull("Folder info for new name is not present", checkInfo);
}
public void testRenameWithoutAssocQNameChange() throws Exception
{
FileInfo folderInfo = getByName(NAME_L0_FOLDER_A, true);
@@ -309,33 +302,25 @@ public class FileFolderServiceImplTest extends TestCase
NodeRef folderNodeRef = folderInfo.getNodeRef();
// Create a child file
QName assocQName = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "abc");
NodeRef newFileNodeRef = fileFolderService.create(
folderNodeRef,
"AnotherFile.txt",
ContentModel.TYPE_CONTENT,
NodeRef newFileNodeRef = fileFolderService.create(folderNodeRef, "AnotherFile.txt", ContentModel.TYPE_CONTENT,
assocQName).getNodeRef();
// Make sure that the correct association QName was used
QName checkQName = nodeService.getPrimaryParent(newFileNodeRef).getQName();
assertEquals(
"The given assoc QName was not used for the path",
assocQName,
checkQName);
assertEquals("The given assoc QName was not used for the path", assocQName, checkQName);
// Rename
String newName = "AnotherFile-new.txt";
folderInfo = fileFolderService.rename(newFileNodeRef, newName);
// Make sure that the association QName did not change
checkQName = nodeService.getPrimaryParent(newFileNodeRef).getQName();
assertEquals(
"The given assoc QName was not used for the path after a rename",
assocQName,
nodeService.getPrimaryParent(newFileNodeRef).getQName());
assertEquals("The given assoc QName was not used for the path after a rename", assocQName, nodeService
.getPrimaryParent(newFileNodeRef).getQName());
}
public void testRenameDuplicate() throws Exception
{
FileInfo folderInfo = getByName(NAME_L0_FOLDER_A, true);
assertNotNull(folderInfo);
// rename duplicate. A file with that name already exists
// rename duplicate. A file with that name already exists
String newName = NAME_L0_FILE_A;
try
{
@@ -347,9 +332,15 @@ public class FileFolderServiceImplTest extends TestCase
// expected
}
}
public void testMove() throws Exception
{
// we are testing failures as well
txn.commit();
// start a new one
txn = transactionService.getNonPropagatingUserTransaction();
txn.begin();
FileInfo folderToMoveInfo = getByName(NAME_L1_FOLDER_A, true);
assertNotNull(folderToMoveInfo);
NodeRef folderToMoveRef = folderToMoveInfo.getNodeRef();
@@ -372,8 +363,34 @@ public class FileFolderServiceImplTest extends TestCase
{
// expected
}
txn.rollback();
txn = transactionService.getNonPropagatingUserTransaction();
txn.begin();
// Move a file to a new location
FileInfo fileA = getByName(NAME_L1_FILE_A, false);
FileInfo folderB = getByName(NAME_L0_FOLDER_B, true);
fileFolderService.copy(fileA.getNodeRef(), folderB.getNodeRef(), null);
try
{
// Move to a target folder without a rename and expecting a name clash
fileFolderService.move(fileA.getNodeRef(), folderB.getNodeRef(), null);
fail("Duplicately-named file in target folder was not detected");
}
catch (FileExistsException e)
{
// Expected
}
txn.rollback();
txn = transactionService.getNonPropagatingUserTransaction();
txn.begin();
// Move to a target folder but with a rename to avoid the name clash
fileFolderService.move(fileA.getNodeRef(), folderB.getNodeRef(), NAME_L1_FILE_B);
}
public void testCopy() throws Exception
{
FileInfo folderToCopyInfo = getByName(NAME_L1_FOLDER_A, true);
@@ -400,7 +417,7 @@ public class FileFolderServiceImplTest extends TestCase
// expected
}
}
public void testCreateFolder() throws Exception
{
// we are testing failures as well
@@ -408,7 +425,7 @@ public class FileFolderServiceImplTest extends TestCase
// start a new one
txn = transactionService.getNonPropagatingUserTransaction();
txn.begin();
FileInfo parentFolderInfo = getByName(NAME_L0_FOLDER_A, true);
assertNotNull(parentFolderInfo);
NodeRef parentFolderRef = parentFolderInfo.getNodeRef();
@@ -461,7 +478,8 @@ public class FileFolderServiceImplTest extends TestCase
M2Type testType = testModel.createType("t111:subfolder");
testType.setParentName("cm:" + ContentModel.TYPE_FOLDER.getLocalName());
dictionaryDAO.putModel(testModel);
fileFolderService.create(parentFolderRef, "Legal subtype of folder", QName.createQName(testNs, "subfolder"));
fileFolderService
.create(parentFolderRef, "Legal subtype of folder", QName.createQName(testNs, "subfolder"));
}
catch (Throwable e)
{
@@ -478,17 +496,17 @@ public class FileFolderServiceImplTest extends TestCase
assertTrue("Node not created", nodeService.exists(fileInfo.getNodeRef()));
assertFalse("File type expected", fileInfo.isFolder());
}
public void testCreateFile() throws Exception
{
}
public void testCreateInRoot() throws Exception
{
fileFolderService.create(rootNodeRef, "New Folder", ContentModel.TYPE_FOLDER);
}
public void testMakeFolders() throws Exception
{
// create a completely new path below the root
@@ -497,12 +515,14 @@ public class FileFolderServiceImplTest extends TestCase
namePath.add("BBB");
namePath.add("CCC");
namePath.add("DDD");
FileInfo lastFileInfo = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath, ContentModel.TYPE_FOLDER);
FileInfo lastFileInfo = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath,
ContentModel.TYPE_FOLDER);
assertNotNull("First makeFolder failed", lastFileInfo);
// check that a repeat works
FileInfo lastFileInfoAgain = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath, ContentModel.TYPE_FOLDER);
FileInfo lastFileInfoAgain = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath,
ContentModel.TYPE_FOLDER);
assertNotNull("Repeat makeFolders failed", lastFileInfoAgain);
assertEquals("Repeat created new leaf", lastFileInfo.getNodeRef(), lastFileInfoAgain.getNodeRef());
// check that it worked
@@ -518,7 +538,7 @@ public class FileFolderServiceImplTest extends TestCase
i++;
}
}
/**
* Lucene only indexes terms that are 3 characters or more
*/
@@ -530,34 +550,36 @@ public class FileFolderServiceImplTest extends TestCase
namePath.add("B");
namePath.add("C");
namePath.add("D");
FileInfo lastFileInfo = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath, ContentModel.TYPE_FOLDER);
FileInfo lastFileInfo = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath,
ContentModel.TYPE_FOLDER);
assertNotNull("First makeFolder failed", lastFileInfo);
// check that a repeat works
FileInfo lastFileInfoAgain = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath, ContentModel.TYPE_FOLDER);
FileInfo lastFileInfoAgain = FileFolderServiceImpl.makeFolders(fileFolderService, rootNodeRef, namePath,
ContentModel.TYPE_FOLDER);
assertNotNull("Repeat makeFolders failed", lastFileInfoAgain);
assertEquals("Repeat created new leaf", lastFileInfo.getNodeRef(), lastFileInfoAgain.getNodeRef());
}
public void testGetNamePath() throws Exception
{
FileInfo fileInfo = getByName(NAME_L1_FILE_A, false);
assertNotNull(fileInfo);
NodeRef nodeRef = fileInfo.getNodeRef();
List<FileInfo> infoPaths = fileFolderService.getNamePath(workingRootNodeRef, nodeRef);
assertEquals("Not enough elements", 2, infoPaths.size());
assertEquals("First level incorrent", NAME_L0_FOLDER_A, infoPaths.get(0).getName());
assertEquals("Second level incorrent", NAME_L1_FILE_A, infoPaths.get(1).getName());
// pass in a null root and make sure that it still works
infoPaths = fileFolderService.getNamePath(null, nodeRef);
assertEquals("Not enough elements", 3, infoPaths.size());
assertEquals("First level incorrent", workingRootNodeRef.getId(), infoPaths.get(0).getName());
assertEquals("Second level incorrent", NAME_L0_FOLDER_A, infoPaths.get(1).getName());
assertEquals("Third level incorrent", NAME_L1_FILE_A, infoPaths.get(2).getName());
// check that a non-aligned path is detected
NodeRef startRef = getByName(NAME_L0_FOLDER_B, true).getNodeRef();
try
@@ -570,7 +592,7 @@ public class FileFolderServiceImplTest extends TestCase
// expected
}
}
public void testSearchSimple() throws Exception
{
FileInfo folderInfo = getByName(NAME_L0_FOLDER_A, true);
@@ -586,28 +608,28 @@ public class FileFolderServiceImplTest extends TestCase
FileInfo checkInfo = getByName(NAME_L1_FILE_A, false);
assertEquals("Incorrect node found", checkInfo.getNodeRef(), fileNodeRef);
}
public void testResolveNamePath() throws Exception
{
FileInfo fileInfo = getByName(NAME_L1_FILE_A, false);
List<String> pathElements = new ArrayList<String>(3);
pathElements.add(NAME_L0_FOLDER_A);
pathElements.add(NAME_L1_FILE_A);
FileInfo fileInfoCheck = fileFolderService.resolveNamePath(workingRootNodeRef, pathElements);
assertNotNull("File info not found", fileInfoCheck);
assertEquals("Path not resolved to correct node", fileInfo.getNodeRef(), fileInfoCheck.getNodeRef());
}
public void testGetReaderWriter() throws Exception
{
// testing a failure
txn.commit();
txn = transactionService.getUserTransaction();
txn.begin();
FileInfo dirInfo = getByName(NAME_L0_FOLDER_A, true);
UserTransaction rollbackTxn = null;
try
{
@@ -624,9 +646,9 @@ public class FileFolderServiceImplTest extends TestCase
{
rollbackTxn.rollback();
}
FileInfo fileInfo = getByName(NAME_L1_FILE_A, false);
ContentWriter writer = fileFolderService.getWriter(fileInfo.getNodeRef());
assertNotNull("Writer is null", writer);
// write some content
@@ -638,23 +660,22 @@ public class FileFolderServiceImplTest extends TestCase
String checkContent = reader.getContentString();
assertEquals("Content mismatch", content, checkContent);
}
public void testLongFileNames() throws Exception
{
String fileName =
"12345678901234567890123456789012345678901234567890" +
"12345678901234567890123456789012345678901234567890" +
"12345678901234567890123456789012345678901234567890" +
"12345678901234567890123456789012345678901234567890" +
"12345678901234567890123456789012345678901234567890" +
"12345678901234567890123456789012345678901234567890";
String fileName = "12345678901234567890123456789012345678901234567890"
+ "12345678901234567890123456789012345678901234567890"
+ "12345678901234567890123456789012345678901234567890"
+ "12345678901234567890123456789012345678901234567890"
+ "12345678901234567890123456789012345678901234567890"
+ "12345678901234567890123456789012345678901234567890";
FileInfo fileInfo = fileFolderService.create(workingRootNodeRef, fileName, ContentModel.TYPE_CONTENT);
// see if we can get it again
NodeRef fileNodeRef = fileFolderService.searchSimple(workingRootNodeRef, fileName);
assertNotNull("Long filename not found", fileNodeRef);
assertEquals(fileInfo.getNodeRef(), fileNodeRef);
}
/**
* Validates <a href="https://issues.alfresco.com/jira/browse/ALFCOM-2655">ACT-7225</a>
*/

View File

@@ -184,7 +184,8 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
static
{
DUPLICATE_CHILD_NAME_EXCEPTIONS = new Class[] {
ConstraintViolationException.class
ConstraintViolationException.class,
DataIntegrityViolationException.class
};
}
@@ -1956,75 +1957,97 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
}
/**
* Explicitly flushes the session looking out for {@link #DUPLICATE_CHILD_NAME_EXCEPTIONS exceptions}
* indicating that the child association name constraint has been violated.
* <p/>
* <b>NOTE: </b>The Hibernate session will be flushed prior to calling the callback. This is necessary
* to prevent legitimate other contstraint violations from being dressed up as
* {@link DuplicateChildNodeNameException}.
*
* @param childAssocChangingCallback the callback in which the child assoc is modified
* @return Returns the callback's result
*/
@SuppressWarnings("unchecked")
private Object writeChildAssocChanges(
HibernateCallback childAssocChangingCallback,
NodeRef parentNodeRef,
QName assocTypeQName,
String childName)
{
// Make sure there are no outstanding changes to flush
DirtySessionMethodInterceptor.flushSession(getSession(false));
// Call the callback and dig into any exception
try
{
Object ret = getHibernateTemplate().execute(childAssocChangingCallback);
// Now flush. Note that we *force* it to flush as the dirty flag will not have been set.
DirtySessionMethodInterceptor.flushSession(getSession(false), true);
// No clashes
return ret;
}
catch (Throwable e)
{
Throwable constraintViolation = (Throwable) ExceptionStackUtil.getCause(
e,
DUPLICATE_CHILD_NAME_EXCEPTIONS);
if (constraintViolation == null)
{
// It was something else
RuntimeException ee = AlfrescoRuntimeException.makeRuntimeException(
e, "Exception while flushing child assoc to database");
throw ee;
}
else
{
if (isDebugEnabled)
{
logger.debug(
"Duplicate child association detected: \n" +
" Parent node: " + parentNodeRef + "\n" +
" Child node name: " + childName,
e);
}
throw new DuplicateChildNodeNameException(parentNodeRef, assocTypeQName, childName);
}
}
}
public Pair<Long, ChildAssociationRef> newChildAssoc(
Long parentNodeId,
Long childNodeId,
boolean isPrimary,
final boolean isPrimary,
final QName assocTypeQName,
QName assocQName,
final QName assocQName,
String newName)
{
final Node parentNode = (Node) getSession().get(NodeImpl.class, parentNodeId);
Node childNode = (Node) getSession().get(NodeImpl.class, childNodeId);
final Node childNode = (Node) getSession().get(NodeImpl.class, childNodeId);
final Pair<String, Long> childNameUnique = getChildNameUnique(assocTypeQName, newName);
final ChildAssoc assoc = new ChildAssocImpl();
assoc.setTypeQName(qnameDAO, assocTypeQName);
assoc.setChildNodeName(childNameUnique.getFirst());
assoc.setChildNodeNameCrc(childNameUnique.getSecond());
assoc.setQName(qnameDAO, assocQName);
assoc.setIsPrimary(isPrimary);
assoc.setIndex(-1);
// maintain inverse sets
assoc.buildAssociation(parentNode, childNode);
// Make sure that all changes to the session are persisted so that we know if any
// failures are from the constraint or not
DirtySessionMethodInterceptor.flushSession(getSession(false));
Long assocId = (Long) getHibernateTemplate().execute(new HibernateCallback()
HibernateCallback newAssocCallback = new HibernateCallback()
{
@SuppressWarnings("unchecked")
public Object doInHibernate(Session session)
public Object doInHibernate(Session session) throws HibernateException, SQLException
{
try
{
try
{
Object result = session.save(assoc);
DirtySessionMethodInterceptor.flushSession(session, true);
return result;
}
catch (Throwable e)
{
ConstraintViolationException constraintViolation = (ConstraintViolationException) ExceptionStackUtil.getCause(
e,
DUPLICATE_CHILD_NAME_EXCEPTIONS);
if (constraintViolation == null)
{
// It was something else
RuntimeException ee = AlfrescoRuntimeException.makeRuntimeException(
e, "Exception while flushing child assoc to database");
throw ee;
}
else
{
throw constraintViolation;
}
}
}
catch (ConstraintViolationException e)
{
// There is already an entity
if (isDebugEnabled)
{
logger.debug("Duplicate child association detected: \n" + " Parent Node: "
+ parentNode.getId() + "\n" + " Child Name Used: " + childNameUnique, e);
}
throw new DuplicateChildNodeNameException(parentNode.getNodeRef(), assocTypeQName, childNameUnique
.getFirst());
}
assoc.setTypeQName(qnameDAO, assocTypeQName);
assoc.setChildNodeName(childNameUnique.getFirst());
assoc.setChildNodeNameCrc(childNameUnique.getSecond());
assoc.setQName(qnameDAO, assocQName);
assoc.setIsPrimary(isPrimary);
assoc.setIndex(-1);
// maintain inverse sets
assoc.buildAssociation(parentNode, childNode);
// Save it
return session.save(assoc);
}
});
};
Long assocId = (Long) writeChildAssocChanges(
newAssocCallback,
parentNode.getNodeRef(),
assocTypeQName,
childNameUnique.getFirst());
// Add it to the cache
Set<Long> oldParentAssocIds = parentAssocsCache.get(childNode.getId());
@@ -2085,32 +2108,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
childAssoc.setChildNodeName(childNameUnique.getFirst());
childAssoc.setChildNodeNameCrc(childNameUnique.getSecond().longValue());
// Flush again to force a DB constraint here
try
{
DirtySessionMethodInterceptor.flushSession(session, true);
// Done
return null;
}
catch (ConstraintViolationException e)
{
// There is already an entity
if (isDebugEnabled)
{
logger.debug("Duplicate child association detected: \n" + " Parent Node: "
+ parentNode.getId() + "\n" + " Child Name Used: " + childNameUnique, e);
}
throw new DuplicateChildNodeNameException(parentNode.getNodeRef(), childAssoc
.getTypeQName(qnameDAO), childNameUnique.getFirst());
}
return null;
}
};
// Make sure that all changes to the session are persisted so that we know if any
// failures are from the constraint or not
DirtySessionMethodInterceptor.flushSession(getSession(false));
getHibernateTemplate().execute(callback);
writeChildAssocChanges(
callback,
parentNode.getNodeRef(),
childAssoc.getTypeQName(qnameDAO),
childName);
// Done
if (isDebugEnabled)
@@ -2169,9 +2174,9 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
Long childAssocId,
Long parentNodeId,
Long childNodeId,
QName assocTypeQName,
QName assocQName,
int index,
final QName assocTypeQName,
final QName assocQName,
final int index,
String childName)
{
final ChildAssoc childAssoc = getChildAssocNotNull(childAssocId);
@@ -2182,19 +2187,31 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
final Node newParentNode = getNodeNotNull(parentNodeId);
final Node newChildNode = getNodeNotNull(childNodeId);
final NodeRef newChildNodeRef = newChildNode.getNodeRef();
final Pair<String, Long> childNameUnique = getChildNameUnique(assocTypeQName, childName);
// Reset the cm:name duplicate handling. This has to be redone, if required.
Pair<String, Long> childNameUnique = getChildNameUnique(assocTypeQName, childName);
childAssoc.setChildNodeName(childNameUnique.getFirst());
childAssoc.setChildNodeNameCrc(childNameUnique.getSecond());
childAssoc.buildAssociation(newParentNode, newChildNode);
childAssoc.setTypeQName(qnameDAO, assocTypeQName);
childAssoc.setQName(qnameDAO, assocQName);
if (index >= 0)
HibernateCallback updateChildAssocCallback = new HibernateCallback()
{
childAssoc.setIndex(index);
}
public Object doInHibernate(Session session) throws HibernateException, SQLException
{
childAssoc.setChildNodeName(childNameUnique.getFirst());
childAssoc.setChildNodeNameCrc(childNameUnique.getSecond());
childAssoc.buildAssociation(newParentNode, newChildNode);
childAssoc.setTypeQName(qnameDAO, assocTypeQName);
childAssoc.setQName(qnameDAO, assocQName);
if (index >= 0)
{
childAssoc.setIndex(index);
}
return null;
}
};
writeChildAssocChanges(
updateChildAssocCallback,
newParentNode.getNodeRef(),
assocTypeQName,
childNameUnique.getFirst());
// Record change ID
if (oldChildNodeRef.equals(newChildNodeRef))
@@ -3446,6 +3463,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
};
NodeAssoc result = (NodeAssoc) getHibernateTemplate().execute(callback);
if (result == null)
{
return null;
}
Pair<Long, AssociationRef> ret = new Pair<Long, AssociationRef>(result.getId(), result.getNodeAssocRef(qnameDAO));
return ret;
}

View File

@@ -219,8 +219,18 @@ public class AVMSnapShotTriggeredIndexingMethodInterceptor implements MethodInte
{
this.defaultMode = defaultMode;
}
/**
* Is snapshot triggered indexing enabled
*
* @return true if indexing is enabled for AVM
*/
public boolean isIndexingEnabled()
{
return enableIndexing;
}
/**
* @param store
* @param before
* @param after

View File

@@ -171,12 +171,16 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
private int mergerMergeFactor = 5;
private int mergerMergeBlockingFactor = 1;
private int mergerMinMergeDocs = 1000;
private int mergerTargetIndexCount = 5;
private int mergerTargetOverlayCount = 5;
private int mergerTargetOverlaysBlockingFactor = 2;
private int termIndexInterval =IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
private boolean useNioMemoryMapping = true;
@@ -1686,6 +1690,16 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
{
this.mergerMergeFactor = mergerMergeFactor;
}
/**
 * Gets the multiplier applied to the merge factor when deciding how many
 * indexes may accumulate before index writers are blocked.
 *
 * @return the merge blocking factor
 */
public int getMergerMergeBlockingFactor()
{
    return this.mergerMergeBlockingFactor;
}
/**
 * Sets the multiplier applied to the merge factor when deciding how many
 * indexes may accumulate before index writers are blocked.
 *
 * @param mergerMergeBlockingFactor the merge blocking factor
 */
public void setMergerMergeBlockingFactor(int mergerMergeBlockingFactor)
{
    this.mergerMergeBlockingFactor = mergerMergeBlockingFactor;
}
public int getMergerMinMergeDocs()
{
@@ -1716,6 +1730,16 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
{
this.mergerTargetOverlayCount = mergerTargetOverlayCount;
}
/**
 * Gets the multiplier applied to the target overlay count when deciding how
 * many overlays may accumulate before index writers are blocked.
 *
 * @return the target overlays blocking factor
 */
public int getMergerTargetOverlaysBlockingFactor()
{
    return this.mergerTargetOverlaysBlockingFactor;
}
/**
 * Sets the multiplier applied to the target overlay count when deciding how
 * many overlays may accumulate before index writers are blocked.
 *
 * @param mergerTargetOverlaysBlockingFactor the target overlays blocking factor
 */
public void setMergerTargetOverlaysBlockingFactor(int mergerTargetOverlaysBlockingFactor)
{
    this.mergerTargetOverlaysBlockingFactor = mergerTargetOverlaysBlockingFactor;
}
public int getTermIndexInterval()
{

View File

@@ -138,6 +138,14 @@ public interface LuceneConfig
*/
public int getMergerMergeFactor();
/**
 * The factor by which the merge factor is multiplied to determine the allowable number of indexes before blocking.
 * Writers are throttled once the index count exceeds this multiple of the merge factor.
 *
 * @return the factor by which the merge factor is multiplied to determine the allowable number of indexes before
 *         blocking
 */
public int getMergerMergeBlockingFactor();
/**
* Lucene merger config
* @return
@@ -150,6 +158,15 @@ public interface LuceneConfig
*/
public int getMergerTargetOverlayCount();
/**
 * The factor by which the target overlay count is multiplied to determine the allowable number of overlays before
 * blocking. Writers are throttled once the overlay count exceeds this multiple of the target overlay count.
 *
 * @return the factor by which the target overlay count is multiplied to determine the allowable number of overlays
 *         before blocking
 */
public int getMergerTargetOverlaysBlockingFactor();
/**
* Target index count. Over this indexes will be merged together.
* @return
@@ -228,4 +245,5 @@ public interface LuceneConfig
* @return
*/
public ConfigurableApplicationContext getApplicationContext();
}

View File

@@ -228,7 +228,7 @@ public class IndexInfo implements IndexMonitor
* Lock for the index entries
*/
private ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
/**
* Read only index readers that also do reference counting.
*/
@@ -315,12 +315,16 @@ public class IndexInfo implements IndexMonitor
private int mergerMergeFactor = 5;
private int mergerMergeBlockingFactor = 1;
private int mergerMaxMergeDocs = 1000000;
private boolean mergerUseCompoundFile = true;
private int mergerTargetOverlays = 5;
private int mergerTargetOverlaysBlockingFactor = 2;
// Common properties for indexers
private long writeLockTimeout = IndexWriter.WRITE_LOCK_TIMEOUT;
@@ -404,9 +408,11 @@ public class IndexInfo implements IndexMonitor
this.writerMaxMergeDocs = config.getWriterMaxMergeDocs();
this.mergerMinMergeDocs = config.getMergerMinMergeDocs();
this.mergerMergeFactor = config.getMergerMergeFactor();
this.mergerMergeBlockingFactor = config.getMergerMergeBlockingFactor();
this.mergerMaxMergeDocs = config.getMergerMaxMergeDocs();
this.termIndexInterval = config.getTermIndexInterval();
this.mergerTargetOverlays = config.getMergerTargetOverlayCount();
this.mergerTargetOverlaysBlockingFactor = config.getMergerTargetOverlaysBlockingFactor();
// Work out the relative path of the index
try
{
@@ -1261,7 +1267,38 @@ public class IndexInfo implements IndexMonitor
getReadLock();
try
{
transition.beforeWithReadLock(id, toDelete, read);
// beforeWithReadLock may indicate that we need to block for the merger to do some work
while (!transition.beforeWithReadLock(id, toDelete, read))
{
synchronized (merger)
{
// If the merger is scheduled, let's wait for it...
int count = merger.getScheduledCount();
if (count <= 0)
{
if (s_logger.isDebugEnabled())
{
s_logger.debug("CAN'T THROTTLE: " + indexEntries.size());
}
break;
}
if (s_logger.isDebugEnabled())
{
s_logger.debug("THROTTLING: " + indexEntries.size());
}
releaseReadLock();
try
{
merger.wait();
}
catch (InterruptedException e)
{
}
}
getReadLock();
}
releaseReadLock();
getWriteLock();
try
@@ -1361,7 +1398,7 @@ public class IndexInfo implements IndexMonitor
*/
private interface Transition
{
void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException;
boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException;
void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException;
@@ -1375,9 +1412,9 @@ public class IndexInfo implements IndexMonitor
*/
private class PreparingTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
// Nothing to do
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1411,9 +1448,10 @@ public class IndexInfo implements IndexMonitor
*/
private class PreparedTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
// We want to block until the merger has executed if we have more than a certain number of indexes
return indexEntries.size() <= mergerMergeBlockingFactor * mergerMergeFactor + mergerTargetOverlaysBlockingFactor * mergerTargetOverlays;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1479,9 +1517,9 @@ public class IndexInfo implements IndexMonitor
private class CommittingTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1513,13 +1551,14 @@ public class IndexInfo implements IndexMonitor
ThreadLocal<IndexReader> tl = new ThreadLocal<IndexReader>();
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
// Make sure we have set up the reader for the data
// ... and close it so we do not up the ref count
closeDelta(id);
IndexEntry entry = indexEntries.get(id);
tl.set(buildReferenceCountingIndexReader(id, entry.getDocumentCount()));
return true;
}
/**
@@ -1594,9 +1633,9 @@ public class IndexInfo implements IndexMonitor
private class RollingBackTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1628,11 +1667,12 @@ public class IndexInfo implements IndexMonitor
{
ThreadLocal<IndexReader> tl = new ThreadLocal<IndexReader>();
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
closeDelta(id);
IndexEntry entry = indexEntries.get(id);
tl.set(buildReferenceCountingIndexReader(id, entry.getDocumentCount()));
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1682,9 +1722,9 @@ public class IndexInfo implements IndexMonitor
private class DeletableTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1727,9 +1767,9 @@ public class IndexInfo implements IndexMonitor
private class ActiveTransition implements Transition
{
public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
public boolean beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
{
return true;
}
public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -2906,6 +2946,13 @@ public class IndexInfo implements IndexMonitor
private abstract class AbstractSchedulable implements Schedulable, Runnable
{
ScheduledState scheduledState = ScheduledState.UN_SCHEDULED;
private int scheduledCount;
/**
 * Gets the number of currently scheduled executions of this task.
 * Synchronized so readers observe updates made by the scheduling methods.
 *
 * @return the scheduled execution count
 */
public synchronized int getScheduledCount()
{
    return this.scheduledCount;
}
public synchronized void schedule()
{
@@ -2917,6 +2964,7 @@ public class IndexInfo implements IndexMonitor
break;
case UN_SCHEDULED:
scheduledState = ScheduledState.SCHEDULED;
scheduledCount++;
threadPoolExecutor.execute(this);
break;
case RECOVERY_SCHEDULED:
@@ -2931,8 +2979,10 @@ public class IndexInfo implements IndexMonitor
{
switch (scheduledState)
{
case RECOVERY_SCHEDULED:
case SCHEDULED:
scheduledCount--;
notifyAll();
case RECOVERY_SCHEDULED:
scheduledState = ScheduledState.UN_SCHEDULED;
break;
case FAILED:
@@ -2977,8 +3027,10 @@ public class IndexInfo implements IndexMonitor
{
switch (scheduledState)
{
case RECOVERY_SCHEDULED:
case SCHEDULED:
scheduledCount--;
notifyAll();
case RECOVERY_SCHEDULED:
scheduledState = ScheduledState.FAILED;
break;
case FAILED:

View File

@@ -418,7 +418,9 @@ public class RetryingTransactionHelper
{
// Sleep a random amount of time before retrying.
// The sleep interval increases with the number of retries.
int sleepIntervalRandom = count > 0 ? random.nextInt(count * retryWaitIncrementMs) : minRetryWaitMs;
int sleepIntervalRandom = (count > 0 && retryWaitIncrementMs > 0)
? random.nextInt(count * retryWaitIncrementMs)
: minRetryWaitMs;
int sleepInterval = Math.min(maxRetryWaitMs, sleepIntervalRandom);
sleepInterval = Math.max(sleepInterval, minRetryWaitMs);
if (logger.isInfoEnabled() && !logger.isDebugEnabled())