New index backup.

Ordering tests for the new indexer.
More robust new index merger thread - more to come.

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@3372 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Andrew Hind
2006-07-21 15:04:23 +00:00
parent 305124e75b
commit fd1646e8a8
11 changed files with 1010 additions and 501 deletions

View File

@@ -23,6 +23,7 @@ import java.util.Set;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -279,5 +280,11 @@ public abstract class LuceneBase2
{
return deltaId;
}
public <R> R doWithWriteLock(LockWork<R> lockWork)
{
return indexInfo.doWithWriteLock(lockWork);
}
}

View File

@@ -18,6 +18,8 @@ package org.alfresco.repo.search.impl.lucene;
import java.io.File;
import junit.framework.TestCase;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeService;
@@ -26,8 +28,6 @@ import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
import junit.framework.TestCase;
/**
* @see org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent
*
@@ -47,7 +47,7 @@ public class LuceneIndexBackupComponentTest extends TestCase
{
TransactionService transactionService = (TransactionService) ctx.getBean("transactionComponent");
NodeService nodeService = (NodeService) ctx.getBean("NodeService");
LuceneIndexerAndSearcherFactory factory = (LuceneIndexerAndSearcherFactory) ctx.getBean("luceneIndexerAndSearcherFactory");
LuceneIndexerAndSearcher factory = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
this.authenticationComponent = (AuthenticationComponent)ctx.getBean("authenticationComponent");
this.authenticationComponent.setSystemUserAsCurrentUser();

View File

@@ -18,10 +18,9 @@ package org.alfresco.repo.search.impl.lucene;
import java.util.Set;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
@@ -44,4 +43,6 @@ public interface LuceneIndexer2 extends IndexerSPI
public void flushPending() throws LuceneIndexException;
public Set<NodeRef> getDeletions();
public boolean getDeleteOnlyNodes();
public <R> R doWithWriteLock(IndexInfo.LockWork <R> lockWork);
}

View File

@@ -24,4 +24,12 @@ public interface LuceneIndexerAndSearcher extends IndexerAndSearcher, LuceneConf
public int prepare() throws IndexerException;
public void commit() throws IndexerException;
public void rollback();
public interface WithAllWriteLocksWork<Result>
{
public Result doWork() throws Exception;
}
public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork);
}

View File

@@ -33,6 +33,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher.WithAllWriteLocksWork;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.search.transaction.SimpleTransaction;
@@ -61,11 +62,9 @@ import org.quartz.JobExecutionException;
/**
* This class is the resource manager for LuceneIndexers and LuceneSearchers.
*
* It supports two phase commit inside XA transactions and outside transactions
* it provides thread local transaction support.
* It supports two phase commit inside XA transactions and outside transactions it provides thread local transaction support.
*
* TODO: Provide pluggable support for a transaction manager TODO: Integrate
* with Spring transactions
* TODO: Provide pluggable support for a transaction manager TODO: Integrate with Spring transactions
*
* @author andyh
*
@@ -73,6 +72,8 @@ import org.quartz.JobExecutionException;
public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher, XAResource
{
private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
@@ -90,9 +91,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
private String lockDirectory;
/**
* A map of active global transactions. It contains all the indexers a
* transaction has used, with at most one indexer for each store within a
* transaction
* A map of active global transactions. It contains all the indexers a transaction has used, with at most one indexer for each store within a transaction
*/
private static Map<Xid, Map<StoreRef, LuceneIndexer>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer>>();
@@ -191,20 +190,18 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
/**
* Set the maximum average transformation time allowed to a transformer in order to have
* the transformation performed in the current transaction. The default is 20ms.
* Set the maximum average transformation time allowed to a transformer in order to have the transformation performed in the current transaction. The default is 20ms.
*
* @param maxAtomicTransformationTime the maximum average time that a text transformation may
* take in order to be performed atomically.
* @param maxAtomicTransformationTime
* the maximum average time that a text transformation may take in order to be performed atomically.
*/
public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
{
this.maxAtomicTransformationTime = maxAtomicTransformationTime;
}
/**
* Check if we are in a global transaction according to the transaction
* manager
* Check if we are in a global transaction according to the transaction manager
*
* @return
*/
@@ -240,8 +237,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
/**
* Get an indexer for the store to use in the current transaction for this
* thread of control.
* Get an indexer for the store to use in the current transaction for this thread of control.
*
* @param storeRef -
* the id of the store
@@ -454,7 +450,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
return;
}
} finally
}
finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -587,7 +584,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
{
indexer.rollback();
}
} finally
}
finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -677,7 +675,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
}
}
} finally
}
finally
{
if (threadLocalIndexers.get() != null)
{
@@ -875,17 +874,17 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
{
IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
}
public void setCommitLockTimeout(long timeout)
{
IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
}
public void setLockPollInterval(long time)
{
Lock.LOCK_POLL_INTERVAL = time;
}
public int getIndexerMaxFieldLength()
{
return indexerMaxFieldLength;
@@ -896,25 +895,24 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
this.indexerMaxFieldLength = indexerMaxFieldLength;
System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
}
/**
* This component is able to <i>safely</i> perform backups of the Lucene indexes while
* the server is running.
* This component is able to <i>safely</i> perform backups of the Lucene indexes while the server is running.
* <p>
* It can be run directly by calling the {@link #backup() } method, but the convenience
* {@link LuceneIndexBackupJob} can be used to call it as well.
* It can be run directly by calling the {@link #backup() } method, but the convenience {@link LuceneIndexBackupJob} can be used to call it as well.
*
* @author Derek Hulley
*/
public static class LuceneIndexBackupComponent
{
private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
private TransactionService transactionService;
private LuceneIndexerAndSearcherFactory factory;
private LuceneIndexerAndSearcher factory;
private NodeService nodeService;
private String targetLocation;
public LuceneIndexBackupComponent()
{
}
@@ -932,9 +930,10 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Set the Lucene index factory that will be used to control the index locks
*
* @param factory the index factory
* @param factory
* the index factory
*/
public void setFactory(LuceneIndexerAndSearcherFactory factory)
public void setFactory(LuceneIndexerAndSearcher factory)
{
this.factory = factory;
}
@@ -942,7 +941,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Used to retrieve the stores
*
* @param nodeService the node service
* @param nodeService
* the node service
*/
public void setNodeService(NodeService nodeService)
{
@@ -952,13 +952,14 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Set the directory to which the backup will be copied
*
* @param targetLocation the backup directory
* @param targetLocation
* the backup directory
*/
public void setTargetLocation(String targetLocation)
{
this.targetLocation = targetLocation;
}
/**
* Backup the Lucene indexes
*/
@@ -978,64 +979,48 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
private void backupImpl()
{
// create the location to copy to
File targetDir = new File(targetLocation);
final File targetDir = new File(targetLocation);
if (targetDir.exists() && !targetDir.isDirectory())
{
throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
}
File targetParentDir = targetDir.getParentFile();
final File targetParentDir = targetDir.getParentFile();
if (targetParentDir == null)
{
throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
}
File tempDir = new File(targetParentDir, "indexbackup_temp");
final File tempDir = new File(targetParentDir, "indexbackup_temp");
// get all the available stores
List<StoreRef> storeRefs = nodeService.getStores();
// lock all the stores
List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
try
factory.doWithAllWriteLocks(new WithAllWriteLocksWork<Object> ()
{
for (StoreRef storeRef : storeRefs)
{
factory.luceneIndexLock.getWriteLock(storeRef);
lockedStores.add(storeRef);
}
File indexRootDir = new File(factory.indexRootLocation);
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
}
catch (Throwable e)
{
throw new AlfrescoRuntimeException("Failed to copy Lucene index root: \n" +
" Index root: " + factory.indexRootLocation + "\n" +
" Target: " + targetDir,
e);
}
finally
{
for (StoreRef storeRef : lockedStores)
public Object doWork()
{
try
{
factory.luceneIndexLock.releaseWriteLock(storeRef);
File indexRootDir = new File(factory.getIndexRootLocation());
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
return null;
}
catch (Throwable e)
{
logger.error("Failed to release index lock for store " + storeRef, e);
throw new AlfrescoRuntimeException(
"Failed to copy Lucene index root: \n"
+ " Index root: " + factory.getIndexRootLocation() + "\n" + " Target: "
+ targetDir, e);
}
}
}
});
if (logger.isDebugEnabled())
{
logger.debug("Backed up Lucene indexes: \n" +
" Target directory: " + targetDir);
logger.debug("Backed up Lucene indexes: \n" + " Target directory: " + targetDir);
}
}
/**
* Makes a backup of the source directory via a temporary folder
*
* @param storeRef
*/
private void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception
@@ -1086,14 +1071,15 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
{
/** KEY_LUCENE_INDEX_BACKUP_COMPONENT = 'luceneIndexBackupComponent' */
public static final String KEY_LUCENE_INDEX_BACKUP_COMPONENT = "luceneIndexBackupComponent";
/**
* Locks the Lucene indexes and copies them to a backup location
*/
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData
.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
if (backupComponent == null)
{
throw new JobExecutionException("Missing job data: " + KEY_LUCENE_INDEX_BACKUP_COMPONENT);
@@ -1102,4 +1088,53 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
backupComponent.backup();
}
}
public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
{
// get all the available stores
List<StoreRef> storeRefs = nodeService.getStores();
// lock all the stores
List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
try
{
for (StoreRef storeRef : storeRefs)
{
luceneIndexLock.getWriteLock(storeRef);
lockedStores.add(storeRef);
}
try
{
return lockWork.doWork();
}
catch (Throwable exception)
{
// Re-throw the exception
if (exception instanceof RuntimeException)
{
throw (RuntimeException) exception;
}
else
{
throw new RuntimeException("Error during run with lock.", exception);
}
}
}
finally
{
for (StoreRef storeRef : lockedStores)
{
try
{
luceneIndexLock.releaseWriteLock(storeRef);
}
catch (Throwable e)
{
logger.error("Failed to release index lock for store " + storeRef, e);
}
}
}
}
}

View File

@@ -17,7 +17,6 @@
package org.alfresco.repo.search.impl.lucene;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -34,6 +33,7 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.search.transaction.SimpleTransaction;
import org.alfresco.repo.search.transaction.SimpleTransactionManager;
@@ -61,11 +61,9 @@ import org.quartz.JobExecutionException;
/**
* This class is the resource manager for LuceneIndexers and LuceneSearchers.
*
* It supports two phase commit inside XA transactions and outside transactions
* it provides thread local transaction support.
* It supports two phase commit inside XA transactions and outside transactions it provides thread local transaction support.
*
* TODO: Provide pluggable support for a transaction manager TODO: Integrate
* with Spring transactions
* TODO: Provide pluggable support for a transaction manager TODO: Integrate with Spring transactions
*
* @author andyh
*
@@ -73,6 +71,8 @@ import org.quartz.JobExecutionException;
public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearcher, XAResource
{
private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory2.class);
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
@@ -90,9 +90,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
private String lockDirectory;
/**
* A map of active global transactions. It contains all the indexers a
* transaction has used, with at most one indexer for each store within a
* transaction
* A map of active global transactions. It contains all the indexers a transaction has used, with at most one indexer for each store within a transaction
*/
private static Map<Xid, Map<StoreRef, LuceneIndexer2>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer2>>();
@@ -191,20 +189,18 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
/**
* Set the maximum average transformation time allowed to a transformer in order to have
* the transformation performed in the current transaction. The default is 20ms.
* Set the maximum average transformation time allowed to a transformer in order to have the transformation performed in the current transaction. The default is 20ms.
*
* @param maxAtomicTransformationTime the maximum average time that a text transformation may
* take in order to be performed atomically.
* @param maxAtomicTransformationTime
* the maximum average time that a text transformation may take in order to be performed atomically.
*/
public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
{
this.maxAtomicTransformationTime = maxAtomicTransformationTime;
}
/**
* Check if we are in a global transaction according to the transaction
* manager
* Check if we are in a global transaction according to the transaction manager
*
* @return
*/
@@ -240,8 +236,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
/**
* Get an indexer for the store to use in the current transaction for this
* thread of control.
* Get an indexer for the store to use in the current transaction for this thread of control.
*
* @param storeRef -
* the id of the store
@@ -357,7 +352,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(storeRef, deltaId, this);
indexer.setNodeService(nodeService);
indexer.setDictionaryService(dictionaryService);
//indexer.setLuceneIndexLock(luceneIndexLock);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setLuceneFullTextSearchIndexer(luceneFullTextSearchIndexer);
indexer.setContentService(contentService);
indexer.setMaxAtomicTransformationTime(maxAtomicTransformationTime);
@@ -395,7 +390,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(storeRef, indexer, this);
searcher.setNamespacePrefixResolver(nameSpaceService);
//searcher.setLuceneIndexLock(luceneIndexLock);
// searcher.setLuceneIndexLock(luceneIndexLock);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setQueryRegister(queryRegister);
@@ -454,7 +449,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
return;
}
} finally
}
finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -587,7 +583,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
indexer.rollback();
}
} finally
}
finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -677,7 +674,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
}
}
} finally
}
finally
{
if (threadLocalIndexers.get() != null)
{
@@ -875,17 +873,17 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
}
public void setCommitLockTimeout(long timeout)
{
IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
}
public void setLockPollInterval(long time)
{
Lock.LOCK_POLL_INTERVAL = time;
}
public int getIndexerMaxFieldLength()
{
return indexerMaxFieldLength;
@@ -896,25 +894,25 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
this.indexerMaxFieldLength = indexerMaxFieldLength;
System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
}
/**
* This component is able to <i>safely</i> perform backups of the Lucene indexes while
* the server is running.
* This component is able to <i>safely</i> perform backups of the Lucene indexes while the server is running.
* <p>
* It can be run directly by calling the {@link #backup() } method, but the convenience
* {@link LuceneIndexBackupJob} can be used to call it as well.
* It can be run directly by calling the {@link #backup() } method, but the convenience {@link LuceneIndexBackupJob} can be used to call it as well.
*
* @author Derek Hulley
*/
public static class LuceneIndexBackupComponent
{
private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory2.class);
private TransactionService transactionService;
private LuceneIndexerAndSearcherFactory2 factory;
private LuceneIndexerAndSearcher factory;
private NodeService nodeService;
private String targetLocation;
public LuceneIndexBackupComponent()
{
}
@@ -932,9 +930,10 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Set the Lucene index factory that will be used to control the index locks
*
* @param factory the index factory
* @param factory
* the index factory
*/
public void setFactory(LuceneIndexerAndSearcherFactory2 factory)
public void setFactory(LuceneIndexerAndSearcher factory)
{
this.factory = factory;
}
@@ -942,7 +941,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Used to retrieve the stores
*
* @param nodeService the node service
* @param nodeService
* the node service
*/
public void setNodeService(NodeService nodeService)
{
@@ -952,13 +952,14 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Set the directory to which the backup will be copied
*
* @param targetLocation the backup directory
* @param targetLocation
* the backup directory
*/
public void setTargetLocation(String targetLocation)
{
this.targetLocation = targetLocation;
}
/**
* Backup the Lucene indexes
*/
@@ -978,62 +979,45 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
private void backupImpl()
{
// create the location to copy to
File targetDir = new File(targetLocation);
final File targetDir = new File(targetLocation);
if (targetDir.exists() && !targetDir.isDirectory())
{
throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
}
File targetParentDir = targetDir.getParentFile();
final File targetParentDir = targetDir.getParentFile();
if (targetParentDir == null)
{
throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
}
File tempDir = new File(targetParentDir, "indexbackup_temp");
final File tempDir = new File(targetParentDir, "indexbackup_temp");
// get all the available stores
List<StoreRef> storeRefs = nodeService.getStores();
// lock all the stores
List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
try
factory.doWithAllWriteLocks(new WithAllWriteLocksWork<Object>()
{
for (StoreRef storeRef : storeRefs)
{
factory.luceneIndexLock.getWriteLock(storeRef);
lockedStores.add(storeRef);
}
File indexRootDir = new File(factory.indexRootLocation);
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
}
catch (Throwable e)
{
throw new AlfrescoRuntimeException("Failed to copy Lucene index root: \n" +
" Index root: " + factory.indexRootLocation + "\n" +
" Target: " + targetDir,
e);
}
finally
{
for (StoreRef storeRef : lockedStores)
public Object doWork()
{
try
{
factory.luceneIndexLock.releaseWriteLock(storeRef);
File indexRootDir = new File(factory.getIndexRootLocation());
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
return null;
}
catch (Throwable e)
{
logger.error("Failed to release index lock for store " + storeRef, e);
throw new AlfrescoRuntimeException(
"Failed to copy Lucene index root: \n"
+ " Index root: " + factory.getIndexRootLocation() + "\n" + " Target: "
+ targetDir, e);
}
}
}
});
if (logger.isDebugEnabled())
{
logger.debug("Backed up Lucene indexes: \n" +
" Target directory: " + targetDir);
logger.debug("Backed up Lucene indexes: \n" + " Target directory: " + targetDir);
}
}
/**
* Makes a backup of the source directory via a temporary folder
*/
@@ -1085,14 +1069,15 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
/** KEY_LUCENE_INDEX_BACKUP_COMPONENT = 'luceneIndexBackupComponent' */
public static final String KEY_LUCENE_INDEX_BACKUP_COMPONENT = "luceneIndexBackupComponent";
/**
* Locks the Lucene indexes and copies them to a backup location
*/
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData
.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
if (backupComponent == null)
{
throw new JobExecutionException("Missing job data: " + KEY_LUCENE_INDEX_BACKUP_COMPONENT);
@@ -1101,4 +1086,108 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
backupComponent.backup();
}
}
public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
{
// get all the available stores
List<StoreRef> storeRefs = nodeService.getStores();
IndexInfo.LockWork<R> currentLockWork = null;
for (int i = storeRefs.size() - 1; i >= 0; i--)
{
if (currentLockWork == null)
{
currentLockWork = new CoreLockWork<R>(getIndexer(storeRefs.get(i)), lockWork);
}
else
{
currentLockWork = new NestingLockWork<R>(getIndexer(storeRefs.get(i)), currentLockWork);
}
}
if (currentLockWork != null)
{
try
{
return currentLockWork.doWork();
}
catch (Throwable exception)
{
// Re-throw the exception
if (exception instanceof RuntimeException)
{
throw (RuntimeException) exception;
}
else
{
throw new RuntimeException("Error during run with lock.", exception);
}
}
}
else
{
return null;
}
}
private static class NestingLockWork<R> implements IndexInfo.LockWork<R>
{
IndexInfo.LockWork<R> lockWork;
LuceneIndexer2 indexer;
NestingLockWork(LuceneIndexer2 indexer, IndexInfo.LockWork<R> lockWork)
{
this.indexer = indexer;
this.lockWork = lockWork;
}
public R doWork() throws Exception
{
return indexer.doWithWriteLock(lockWork);
}
}
private static class CoreLockWork<R> implements IndexInfo.LockWork<R>
{
WithAllWriteLocksWork<R> lockWork;
LuceneIndexer2 indexer;
CoreLockWork(LuceneIndexer2 indexer, WithAllWriteLocksWork<R> lockWork)
{
this.indexer = indexer;
this.lockWork = lockWork;
}
public R doWork() throws Exception
{
return indexer.doWithWriteLock(new IndexInfo.LockWork<R>()
{
public R doWork()
{
try
{
return lockWork.doWork();
}
catch (Throwable exception)
{
// Re-throw the exception
if (exception instanceof RuntimeException)
{
throw (RuntimeException) exception;
}
else
{
throw new RuntimeException("Error during run with lock.", exception);
}
}
}
});
}
}
}

View File

@@ -43,6 +43,7 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -1970,5 +1971,9 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
}
}
}

View File

@@ -236,7 +236,7 @@ public class LuceneSearcherImpl extends LuceneBase implements LuceneSearcher
switch (sd.getSortType())
{
case FIELD:
fields[index++] = new SortField(sd.getField(), !sd.isAscending());
fields[index++] = new SortField(sd.getField(), SortField.STRING, !sd.isAscending());
break;
case DOCUMENT:
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());

View File

@@ -59,6 +59,7 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.Duration;
import org.alfresco.service.cmr.search.QueryParameter;
import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.ResultSet;
@@ -93,6 +94,16 @@ public class LuceneTest2 extends TestCase
private static Log logger = LogFactory.getLog(LuceneTest2.class);
QName createdDate = QName.createQName(TEST_NAMESPACE, "createdDate");
QName orderDouble = QName.createQName(TEST_NAMESPACE, "orderDouble");
QName orderFloat = QName.createQName(TEST_NAMESPACE, "orderFloat");
QName orderLong = QName.createQName(TEST_NAMESPACE, "orderLong");
QName orderInt = QName.createQName(TEST_NAMESPACE, "orderInt");
TransactionService transactionService;
NodeService nodeService;
@@ -201,16 +212,16 @@ public class LuceneTest2 extends TestCase
rootNodeRef = nodeService.getRootNode(storeRef);
n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"),
testSuperType).getChildRef();
testSuperType, getOrderProperties()).getChildRef();
nodeService.setProperty(n1, QName.createQName("{namespace}property-1"), "ValueOne");
n2 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}two"),
testSuperType).getChildRef();
testSuperType, getOrderProperties()).getChildRef();
nodeService.setProperty(n2, QName.createQName("{namespace}property-1"), "valueone");
nodeService.setProperty(n2, QName.createQName("{namespace}property-2"), "valuetwo");
n3 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}three"),
testSuperType).getChildRef();
testSuperType, getOrderProperties()).getChildRef();
ObjectOutputStream oos;
try
@@ -270,24 +281,24 @@ public class LuceneTest2 extends TestCase
nodeService.getProperties(n3);
nodeService.getProperties(n4);
n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType)
.getChildRef();
n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType)
.getChildRef();
n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType)
.getChildRef();
n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType)
.getChildRef();
n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType)
.getChildRef();
n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType)
.getChildRef();
n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType)
.getChildRef();
n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType)
.getChildRef();
n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType)
.getChildRef();
n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType,
getOrderProperties()).getChildRef();
n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType,
getOrderProperties()).getChildRef();
n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType,
getOrderProperties()).getChildRef();
n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType,
getOrderProperties()).getChildRef();
n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType,
getOrderProperties()).getChildRef();
n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType,
getOrderProperties()).getChildRef();
n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType,
getOrderProperties()).getChildRef();
n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType,
getOrderProperties()).getChildRef();
n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType,
getOrderProperties()).getChildRef();
Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
properties.put(ContentModel.PROP_CONTENT, new ContentData(null, "text/plain", 0L, "UTF-16"));
@@ -317,6 +328,32 @@ public class LuceneTest2 extends TestCase
}
private double orderDoubleCount = -0.11d;
private Date orderDate = new Date();
private float orderFloatCount = -3.5556f;
private long orderLongCount = -1999999999999999l;
private int orderIntCount = -45764576;
/**
 * Builds the property map used to seed sortable properties on a freshly
 * created test node (creation date, double, float, long and int ordering
 * properties).
 * <p>
 * Each call also advances the internal counters (date steps back one day,
 * numeric counters step by a fixed increment), so successive calls yield
 * monotonically ordered values — the ordering tests rely on this.
 *
 * @return a new map of test property QNames to their current ordered values
 */
public Map<QName, Serializable> getOrderProperties()
{
    Map<QName, Serializable> testProperties = new HashMap<QName, Serializable>();
    testProperties.put(createdDate, orderDate);
    testProperties.put(orderDouble, orderDoubleCount);
    testProperties.put(orderFloat, orderFloatCount);
    testProperties.put(orderLong, orderLongCount);
    testProperties.put(orderInt, orderIntCount);

    // Step every counter so the next node gets strictly "later" sort values.
    orderDate = Duration.subtract(orderDate, new Duration("P1D"));
    orderDoubleCount += 0.1d;
    orderFloatCount += 0.82f;
    // Upper-case 'L' suffix: the lowercase 'l' in the original is easily
    // misread as the digit '1' (standard Checkstyle/Error Prone finding).
    orderLongCount += 299999999999999L;
    orderIntCount += 8576457;

    return testProperties;
}
@Override
protected void tearDown() throws Exception
{
@@ -794,6 +831,226 @@ public class LuceneTest2 extends TestCase
}
results.close();
// sort by created date
SearchParameters sp7 = new SearchParameters();
sp7.addStore(rootNodeRef.getStoreRef());
sp7.setLanguage(SearchService.LANGUAGE_LUCENE);
sp7.setQuery("PATH:\"//.\"");
sp7.addSort("@" + createdDate, true);
results = searcher.query(sp7);
Date date = null;
for (ResultSetRow row : results)
{
Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
.getNodeRef(), createdDate));
//System.out.println(currentBun);
if (date != null)
{
assertTrue(date.compareTo(currentBun) <= 0);
}
date = currentBun;
}
results.close();
SearchParameters sp8 = new SearchParameters();
sp8.addStore(rootNodeRef.getStoreRef());
sp8.setLanguage(SearchService.LANGUAGE_LUCENE);
sp8.setQuery("PATH:\"//.\"");
sp8.addSort("@" + createdDate, false);
results = searcher.query(sp8);
date = null;
for (ResultSetRow row : results)
{
Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
.getNodeRef(), createdDate));
//System.out.println(currentBun);
if ((date != null) && (currentBun != null))
{
assertTrue(date.compareTo(currentBun) >= 0);
}
date = currentBun;
}
results.close();
// sort by double
SearchParameters sp9 = new SearchParameters();
sp9.addStore(rootNodeRef.getStoreRef());
sp9.setLanguage(SearchService.LANGUAGE_LUCENE);
sp9.setQuery("PATH:\"//.\"");
sp9.addSort("@" + orderDouble, true);
results = searcher.query(sp9);
Double d = null;
for (ResultSetRow row : results)
{
Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
.getNodeRef(), orderDouble));
//System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
if (d != null)
{
assertTrue(d.compareTo(currentBun) <= 0);
}
d = currentBun;
}
results.close();
SearchParameters sp10 = new SearchParameters();
sp10.addStore(rootNodeRef.getStoreRef());
sp10.setLanguage(SearchService.LANGUAGE_LUCENE);
sp10.setQuery("PATH:\"//.\"");
sp10.addSort("@" + orderDouble, false);
results = searcher.query(sp10);
d = null;
for (ResultSetRow row : results)
{
Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
.getNodeRef(), orderDouble));
//System.out.println(currentBun);
if ((d != null) && (currentBun != null))
{
assertTrue(d.compareTo(currentBun) >= 0);
}
d = currentBun;
}
results.close();
// sort by float
SearchParameters sp11 = new SearchParameters();
sp11.addStore(rootNodeRef.getStoreRef());
sp11.setLanguage(SearchService.LANGUAGE_LUCENE);
sp11.setQuery("PATH:\"//.\"");
sp11.addSort("@" + orderFloat, true);
results = searcher.query(sp11);
Float f = null;
for (ResultSetRow row : results)
{
Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
.getNodeRef(), orderFloat));
//System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
if (f != null)
{
assertTrue(f.compareTo(currentBun) <= 0);
}
f = currentBun;
}
results.close();
SearchParameters sp12 = new SearchParameters();
sp12.addStore(rootNodeRef.getStoreRef());
sp12.setLanguage(SearchService.LANGUAGE_LUCENE);
sp12.setQuery("PATH:\"//.\"");
sp12.addSort("@" + orderFloat, false);
results = searcher.query(sp12);
f = null;
for (ResultSetRow row : results)
{
Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
.getNodeRef(), orderFloat));
//System.out.println(currentBun);
if ((f != null) && (currentBun != null))
{
assertTrue(f.compareTo(currentBun) >= 0);
}
f = currentBun;
}
results.close();
// sort by long
SearchParameters sp13 = new SearchParameters();
sp13.addStore(rootNodeRef.getStoreRef());
sp13.setLanguage(SearchService.LANGUAGE_LUCENE);
sp13.setQuery("PATH:\"//.\"");
sp13.addSort("@" + orderLong, true);
results = searcher.query(sp13);
Long l = null;
for (ResultSetRow row : results)
{
Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
.getNodeRef(), orderLong));
//System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
if (l != null)
{
assertTrue(l.compareTo(currentBun) <= 0);
}
l = currentBun;
}
results.close();
SearchParameters sp14 = new SearchParameters();
sp14.addStore(rootNodeRef.getStoreRef());
sp14.setLanguage(SearchService.LANGUAGE_LUCENE);
sp14.setQuery("PATH:\"//.\"");
sp14.addSort("@" + orderLong, false);
results = searcher.query(sp14);
l = null;
for (ResultSetRow row : results)
{
Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
.getNodeRef(), orderLong));
//System.out.println(currentBun);
if ((l != null) && (currentBun != null))
{
assertTrue(l.compareTo(currentBun) >= 0);
}
l = currentBun;
}
results.close();
// sort by int
SearchParameters sp15 = new SearchParameters();
sp15.addStore(rootNodeRef.getStoreRef());
sp15.setLanguage(SearchService.LANGUAGE_LUCENE);
sp15.setQuery("PATH:\"//.\"");
sp15.addSort("@" + orderInt, true);
results = searcher.query(sp15);
Integer i = null;
for (ResultSetRow row : results)
{
Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
.getNodeRef(), orderInt));
//System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
if (i != null)
{
assertTrue(i.compareTo(currentBun) <= 0);
}
i = currentBun;
}
results.close();
SearchParameters sp16 = new SearchParameters();
sp16.addStore(rootNodeRef.getStoreRef());
sp16.setLanguage(SearchService.LANGUAGE_LUCENE);
sp16.setQuery("PATH:\"//.\"");
sp16.addSort("@" + orderInt, false);
results = searcher.query(sp16);
i = null;
for (ResultSetRow row : results)
{
Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
.getNodeRef(), orderInt));
//System.out.println(currentBun);
if ((i != null) && (currentBun != null))
{
assertTrue(i.compareTo(currentBun) >= 0);
}
i = currentBun;
}
results.close();
luceneFTS.resume();
}

View File

@@ -14,165 +14,217 @@
<namespace uri="http://www.alfresco.org/test/lucenetest" prefix="test"/>
</namespaces>
<types>
<type name="test:testSuperType">
<title>Test Super Type</title>
<parent>sys:container</parent>
<associations>
<child-association name="test:assoc">
<source>
<mandatory>false</mandatory>
<many>true</many>
</source>
<target>
<class>sys:base</class>
<mandatory>false</mandatory>
<many>true</many>
</target>
</child-association>
</associations>
</type>
<type name="test:testType">
<title>Test Type</title>
<parent>test:testSuperType</parent>
<properties>
<property name="test:text-indexed-stored-tokenised-atomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:text-indexed-unstored-tokenised-atomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>false</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:text-indexed-stored-tokenised-nonatomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>false</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:int-ista">
<type>d:int</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:long-ista">
<type>d:long</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:float-ista">
<type>d:float</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:double-ista">
<type>d:double</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:date-ista">
<type>d:date</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:datetime-ista">
<type>d:datetime</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:boolean-ista">
<type>d:boolean</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:qname-ista">
<type>d:qname</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:category-ista">
<type>d:category</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:noderef-ista">
<type>d:noderef</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
</properties>
<mandatory-aspects>
<aspect>test:testAspect</aspect>
</mandatory-aspects>
</type>
</types>
<types>
<type name="test:testSuperType">
<title>Test Super Type</title>
<parent>sys:container</parent>
<properties>
<property name="test:createdDate">
<type>d:date</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:orderDouble">
<type>d:double</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:orderFloat">
<type>d:float</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:orderLong">
<type>d:long</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:orderInt">
<type>d:int</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
</properties>
<associations>
<child-association name="test:assoc">
<source>
<mandatory>false</mandatory>
<many>true</many>
</source>
<target>
<class>sys:base</class>
<mandatory>false</mandatory>
<many>true</many>
</target>
</child-association>
</associations>
</type>
<type name="test:testType">
<title>Test Type</title>
<parent>test:testSuperType</parent>
<properties>
<property name="test:text-indexed-stored-tokenised-atomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:text-indexed-unstored-tokenised-atomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>false</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:text-indexed-stored-tokenised-nonatomic">
<type>d:text</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>false</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:int-ista">
<type>d:int</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:long-ista">
<type>d:long</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:float-ista">
<type>d:float</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:double-ista">
<type>d:double</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:date-ista">
<type>d:date</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:datetime-ista">
<type>d:datetime</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:boolean-ista">
<type>d:boolean</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:qname-ista">
<type>d:qname</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:category-ista">
<type>d:category</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
<property name="test:noderef-ista">
<type>d:noderef</type>
<mandatory>true</mandatory>
<multiple>false</multiple>
<index enabled="true">
<atomic>true</atomic>
<stored>true</stored>
<tokenised>true</tokenised>
</index>
</property>
</properties>
<mandatory-aspects>
<aspect>test:testAspect</aspect>
</mandatory-aspects>
</type>
</types>
<aspects>
<aspect name="test:testSuperAspect">

View File

@@ -396,12 +396,12 @@ public class IndexInfo
cleanerThread = new Thread(cleaner);
cleanerThread.setDaemon(true);
cleanerThread.setName("Index cleaner thread");
cleanerThread.start();
//cleanerThread.start();
mergerThread = new Thread(merger);
mergerThread.setDaemon(true);
mergerThread.setName("Index merger thread");
mergerThread.start();
//mergerThread.start();
IndexWriter writer;
try
@@ -1633,11 +1633,24 @@ public class IndexInfo
return size;
}
private interface LockWork<Result>
public interface LockWork<Result>
{
public Result doWork() throws Exception;
}
/**
 * Runs the given unit of work while holding this index's write lock, and
 * additionally under the cross-process file lock (via doWithFileLock), so
 * the work executes exclusively with respect to both in-JVM readers/writers
 * and other JVMs sharing the index directory.
 * <p>
 * The write lock is always released in the finally block, even if the work
 * (or file-lock acquisition) throws.
 *
 * @param <R>      the result type produced by the work
 * @param lockWork the work to execute under exclusive access
 * @return the value returned by {@code lockWork.doWork()}
 */
public <R> R doWithWriteLock(LockWork<R> lockWork)
{
    getWriteLock();
    try
    {
        // File lock scope is managed inside doWithFileLock.
        return doWithFileLock(lockWork);
    }
    finally
    {
        releaseWriteLock();
    }
}
private <R> R doWithFileLock(LockWork<R> lockWork)
{
FileLock fileLock = null;
@@ -1683,66 +1696,92 @@ public class IndexInfo
}
}
public static void main(String[] args) throws IOException
public static void main(String[] args)
{
System.setProperty("disableLuceneLocks", "true");
HashSet<NodeRef> deletions = new HashSet<NodeRef>();
for (int i = 0; i < 0; i++)
{
deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()));
}
int repeat = 100;
int docs = 1;
final IndexInfo ii = new IndexInfo(new File("c:\\indexTest"));
long totalTimeA = 0;
long countA = 0;
String indexLocation = args[0];
IndexInfo ii = new IndexInfo(new File(indexLocation));
while (true)
{
long start = System.nanoTime();
for (int i = 0; i < repeat; i++)
ii.readWriteLock.writeLock().lock();
try
{
String guid = GUID.generate();
ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());
for (int j = 0; j < docs; j++)
System.out.println("Entry List for " + indexLocation);
System.out.println(" Size = " + ii.indexEntries.size());
int i = 0;
for (IndexEntry entry : ii.indexEntries.values())
{
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid + " " + j + " " + k, false, true, false));
}
writer.addDocument(doc);
}
ii.closeDeltaIndexWriter(guid);
ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
ii.setPreparedState(guid, deletions, docs, false);
ii.getDeletions(guid);
ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
for (int j = 0; j < 0; j++)
{
ii.getMainIndexReferenceCountingReadOnlyIndexReader();
System.out.println("\t" + (i++) + "\t" + entry.toString());
}
}
long end = System.nanoTime();
totalTimeA += (end - start);
countA += repeat;
float average = countA * 1000000000f / totalTimeA;
System.out.println("Repeated "
+ repeat + " in " + ((end - start) / 1000000000.0) + " average = " + average);
finally
{
ii.releaseWriteLock();
}
}
}
// public static void main(String[] args) throws IOException
// {
// System.setProperty("disableLuceneLocks", "true");
//
// HashSet<NodeRef> deletions = new HashSet<NodeRef>();
// for (int i = 0; i < 0; i++)
// {
// deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()));
// }
//
// int repeat = 100;
// int docs = 1;
// final IndexInfo ii = new IndexInfo(new File("c:\\indexTest"));
//
// long totalTimeA = 0;
// long countA = 0;
//
// while (true)
// {
// long start = System.nanoTime();
// for (int i = 0; i < repeat; i++)
// {
// String guid = GUID.generate();
// ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
// IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());
//
// for (int j = 0; j < docs; j++)
// {
// Document doc = new Document();
// for (int k = 0; k < 15; k++)
// {
// doc.add(new Field("ID" + k, guid + " " + j + " " + k, false, true, false));
// }
// writer.addDocument(doc);
// }
//
// ii.closeDeltaIndexWriter(guid);
// ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
// ii.setPreparedState(guid, deletions, docs, false);
// ii.getDeletions(guid);
// ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
// ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
// ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
// for (int j = 0; j < 0; j++)
// {
// ii.getMainIndexReferenceCountingReadOnlyIndexReader();
// }
// }
//
// long end = System.nanoTime();
//
// totalTimeA += (end - start);
// countA += repeat;
// float average = countA * 1000000000f / totalTimeA;
//
// System.out.println("Repeated "
// + repeat + " in " + ((end - start) / 1000000000.0) + " average = " + average);
// }
// }
/**
* Clean up support.
*
@@ -1862,120 +1901,132 @@ public class IndexInfo
while (running)
{
// Get the read local to decide what to do
// Single JVM to start with
MergeAction action = MergeAction.NONE;
getReadLock();
try
{
if (indexIsShared && !checkVersion())
{
releaseReadLock();
getWriteLock();
try
{
// Sync with disk image if required
doWithFileLock(new LockWork<Object>()
{
public Object doWork() throws Exception
{
return null;
}
});
}
finally
{
getReadLock();
releaseWriteLock();
}
}
// Get the read local to decide what to do
// Single JVM to start with
MergeAction action = MergeAction.NONE;
int indexes = 0;
boolean mergingIndexes = false;
int deltas = 0;
boolean applyingDeletions = false;
for (IndexEntry entry : indexEntries.values())
{
if (entry.getType() == IndexType.INDEX)
{
indexes++;
if (entry.getStatus() == TransactionStatus.MERGE)
{
mergingIndexes = true;
}
}
else if (entry.getType() == IndexType.DELTA)
{
if (entry.getStatus() == TransactionStatus.COMMITTED)
{
deltas++;
}
if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)
{
applyingDeletions = true;
}
}
}
if (s_logger.isDebugEnabled())
{
s_logger.debug("Indexes = " + indexes);
s_logger.debug("Merging = " + mergingIndexes);
s_logger.debug("Deltas = " + deltas);
s_logger.debug("Deleting = " + applyingDeletions);
}
if (!mergingIndexes && !applyingDeletions)
{
if ((indexes > 5) || (deltas > 5))
{
if (indexes > deltas)
{
// Try merge
action = MergeAction.MERGE_INDEX;
}
else
{
// Try delete
action = MergeAction.APPLY_DELTA_DELETION;
}
}
}
}
catch (IOException e)
{
e.printStackTrace();
// Ignore IO error and retry
}
finally
{
releaseReadLock();
}
if (action == MergeAction.APPLY_DELTA_DELETION)
{
mergeDeletions();
}
else if (action == MergeAction.MERGE_INDEX)
{
mergeIndexes();
}
synchronized (this)
{
getReadLock();
try
{
this.wait();
if (indexIsShared && !checkVersion())
{
releaseReadLock();
getWriteLock();
try
{
// Sync with disk image if required
doWithFileLock(new LockWork<Object>()
{
public Object doWork() throws Exception
{
return null;
}
});
}
finally
{
try
{
getReadLock();
}
finally
{
releaseWriteLock();
}
}
}
int indexes = 0;
boolean mergingIndexes = false;
int deltas = 0;
boolean applyingDeletions = false;
for (IndexEntry entry : indexEntries.values())
{
if (entry.getType() == IndexType.INDEX)
{
indexes++;
if (entry.getStatus() == TransactionStatus.MERGE)
{
mergingIndexes = true;
}
}
else if (entry.getType() == IndexType.DELTA)
{
if (entry.getStatus() == TransactionStatus.COMMITTED)
{
deltas++;
}
if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)
{
applyingDeletions = true;
}
}
}
if (s_logger.isDebugEnabled())
{
s_logger.debug("Indexes = " + indexes);
s_logger.debug("Merging = " + mergingIndexes);
s_logger.debug("Deltas = " + deltas);
s_logger.debug("Deleting = " + applyingDeletions);
}
if (!mergingIndexes && !applyingDeletions)
{
if ((indexes > 5) || (deltas > 5))
{
if (indexes > deltas)
{
// Try merge
action = MergeAction.MERGE_INDEX;
}
else
{
// Try delete
action = MergeAction.APPLY_DELTA_DELETION;
}
}
}
}
catch (InterruptedException e)
catch (IOException e)
{
running = false;
s_logger.error(e);
}
finally
{
releaseReadLock();
}
if (action == MergeAction.APPLY_DELTA_DELETION)
{
mergeDeletions();
}
else if (action == MergeAction.MERGE_INDEX)
{
mergeIndexes();
}
synchronized (this)
{
try
{
this.wait();
}
catch (InterruptedException e)
{
// No action - could signal thread termination
}
}
}
catch (Throwable t)
{
s_logger.error(t);
}
}
@@ -2151,7 +2202,7 @@ public class IndexInfo
}
catch (IOException e)
{
e.printStackTrace();
s_logger.error(e);
fail = true;
}
@@ -2411,9 +2462,9 @@ public class IndexInfo
}
}
}
catch (IOException e)
catch (Throwable e)
{
e.printStackTrace();
s_logger.error(e);
fail = true;
}
@@ -2528,10 +2579,10 @@ public class IndexInfo
private void dumpInfo()
{
readWriteLock.writeLock().lock();
try
if (s_logger.isDebugEnabled())
{
if (s_logger.isDebugEnabled())
readWriteLock.writeLock().lock();
try
{
s_logger.debug("");
s_logger.debug("Entry List");
@@ -2540,10 +2591,10 @@ public class IndexInfo
s_logger.debug(" " + entry.toString());
}
}
}
finally
{
readWriteLock.writeLock().unlock();
finally
{
readWriteLock.writeLock().unlock();
}
}
}
@@ -2584,4 +2635,8 @@ public class IndexInfo
readWriteLock.readLock().unlock();
}
/**
 * @return the index directory path — identifies this IndexInfo instance in
 *         logs and debug output
 */
@Override
public String toString()
{
    return indexDirectory.toString();
}
}