Add WCM search with atomic indexing around snapshots

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@5712 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Andrew Hind
2007-05-17 15:52:46 +00:00
parent ee7e8cb4d0
commit 19ce257baa
46 changed files with 7373 additions and 3258 deletions
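
In outline, the change makes WCM (AVM) stores searchable, with the Lucene index brought up to date atomically as part of createSnapshot. A minimal sketch of the resulting flow follows; it uses only calls that appear in the test changes below, and the store name "main", the class name and the way the beans are obtained are illustrative rather than part of the commit.

import org.alfresco.repo.avm.AVMNodeConverter;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.search.IndexerAndSearcher;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchService;

/**
 * Illustrative sketch only: create WCM content, snapshot it (which now triggers
 * atomic indexing), then query it through the Lucene search service.
 */
public class WcmSearchSketch
{
    public void createSnapshotAndSearch(AVMService avmService, IndexerAndSearcher indexerAndSearcher) throws Exception
    {
        avmService.createStore("main");
        avmService.createDirectory("main:/", "a");
        avmService.createFile("main:/a", "foo").close();
        ContentWriter writer = avmService.getContentWriter("main:/a/foo");
        writer.setEncoding("UTF-8");
        writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
        writer.putContent("I am main:/a/foo");
        // The snapshot call is intercepted, and the delta between the previous
        // and the new snapshot is indexed before the call returns.
        avmService.createSnapshot("main", null, null);
        StoreRef storeRef = AVMNodeConverter.ToStoreRef("main");
        SearchService searchService = indexerAndSearcher.getSearcher(storeRef, true);
        ResultSet results = searchService.query(storeRef, "lucene", "TEXT:\"I am main\"");
        try
        {
            System.out.println("Hits: " + results.length());
        }
        finally
        {
            results.close();
        }
    }
}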

File diff suppressed because it is too large


@@ -33,7 +33,6 @@ import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import org.alfresco.repo.avm.AVMRepository;
import org.alfresco.repo.domain.DbAccessControlList;
import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
@@ -1397,6 +1396,9 @@ public class AVMServiceImpl implements AVMService
{
InputStream in = getFileInputStream(version, desc.getPath());
createFile(path, name, in);
ContentData cd = getContentDataForRead(version, desc.getPath());
setEncoding(newPath, cd.getEncoding());
setMimeType(newPath, cd.getMimetype());
}
else // desc is a directory.
{

File diff suppressed because it is too large


@@ -29,10 +29,21 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.search.IndexerAndSearcher;
import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
import org.alfresco.service.cmr.avm.AVMNodeDescriptor;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
import org.alfresco.service.cmr.avmsync.AVMSyncService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.springframework.context.support.FileSystemXmlApplicationContext;
@@ -68,6 +79,8 @@ public class AVMServiceTestBase extends TestCase
*/
private long fStartTime;
protected static IndexerAndSearcher fIndexerAndSearcher;
/**
* Setup for AVM tests. Note that we set the polling
* interval for the reaper to 4 seconds so that tests will
@@ -82,6 +95,7 @@ public class AVMServiceTestBase extends TestCase
fService = (AVMService)fContext.getBean("AVMService");
fReaper = (OrphanReaper)fContext.getBean("orphanReaper");
fSyncService = (AVMSyncService)fContext.getBean("AVMSyncService");
fIndexerAndSearcher = (IndexerAndSearcher)fContext.getBean("indexerAndSearcherFactory");
AuthenticationService authService = (AuthenticationService)fContext.getBean("AuthenticationService");
authService.authenticate("admin", "admin".toCharArray());
CreateStoreTxnListener cstl = (CreateStoreTxnListener)fContext.getBean("createStoreTxnListener");
@@ -215,17 +229,295 @@ public class AVMServiceTestBase extends TestCase
fService.createDirectory("main:/", "d");
fService.createDirectory("main:/d", "e");
fService.createDirectory("main:/d/e", "f");
fService.createFile("main:/a/b/c", "foo").close();
PrintStream out = new PrintStream(fService.getFileOutputStream("main:/a/b/c/foo"));
out.println("I am main:/a/b/c/foo");
out.flush();
out.close();
ContentWriter writer = fService.getContentWriter("main:/a/b/c/foo");
writer.setEncoding("UTF-8");
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("I am main:/a/b/c/foo");
fService.createFile("main:/a/b/c", "bar").close();
out = new PrintStream(fService.getFileOutputStream("main:/a/b/c/bar"));
out.println("I am main:/a/b/c/bar");
out.flush();
out.close();
writer = fService.getContentWriter("main:/a/b/c/bar");
// Force a conversion
writer.setEncoding("UTF-16");
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("I am main:/a/b/c/bar");
fService.createSnapshot("main", null, null);
runQueriesAgainstBasicTree("main");
}
protected void runQueriesAgainstBasicTree(String store)
{
StoreRef storeRef = AVMNodeConverter.ToStoreRef(store);
// Text index
SearchService searchService = fIndexerAndSearcher.getSearcher(AVMNodeConverter.ToStoreRef(store), true);
ResultSet results = searchService.query(storeRef, "lucene", "TEXT:\"I am main\"");
assertEquals(2, results.length());
results.close();
// Basic properties
// Note "a" is a stop word and therefore not findable ...
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NAME)+":\"foo\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NAME)+":foo");
assertEquals(1, results.length());
results.close();
// TODO: Fix auth in AVMDiskDriver and more??
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_CREATOR)+":admin");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_MODIFIER)+":admin");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_OWNER)+":admin");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NODE_UUID)+":unknown");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_STORE_PROTOCOL)+":avm");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_STORE_IDENTIFIER)+":"+store);
assertEquals(9, results.length());
results.close();
// Basic paths
results = searchService.query(storeRef, "lucene", "PATH:\"/\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c/foo\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c/bar\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d/e\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d/e/f\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//.\"");
assertEquals(9, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//*\"");
assertEquals(8, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a//.\"");
assertEquals(5, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a//*\"");
assertEquals(4, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/*\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//c/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*/*/*\"");
assertEquals(0, results.length());
results.close();
}
protected void runQueriesAgainstBasicTreeWithAOnly(String store)
{
StoreRef storeRef = AVMNodeConverter.ToStoreRef(store);
// Text index
SearchService searchService = fIndexerAndSearcher.getSearcher(AVMNodeConverter.ToStoreRef(store), true);
ResultSet results = searchService.query(storeRef, "lucene", "TEXT:\"I am main\"");
assertEquals(2, results.length());
results.close();
// Basic properties
// Note "a" is a stop word and therefore not findable ...
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NAME)+":\"foo\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NAME)+":foo");
assertEquals(1, results.length());
results.close();
// TODO: Fix auth in AVMDiskDriver and more??
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_CREATOR)+":admin");
if(results.length() == 10)
{
for (ResultSetRow row : results)
{
System.out.println(row.getNodeRef());
}
}
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_MODIFIER)+":admin");
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_OWNER)+":admin");
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_NODE_UUID)+":unknown");
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_STORE_PROTOCOL)+":avm");
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", LuceneQueryParser.escape("@"+ContentModel.PROP_STORE_IDENTIFIER)+":"+store);
assertEquals(6, results.length());
results.close();
// Basic paths
results = searchService.query(storeRef, "lucene", "PATH:\"/\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c/foo\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/b/c/bar\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d\"");
assertEquals(0, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d/e\"");
assertEquals(0, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/d/e/f\"");
assertEquals(0, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//.\"");
assertEquals(6, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//*\"");
assertEquals(5, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a//.\"");
assertEquals(5, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a//*\"");
assertEquals(4, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/a/*\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"//c/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*\"");
assertEquals(1, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*/*\"");
assertEquals(2, results.length());
results.close();
results = searchService.query(storeRef, "lucene", "PATH:\"/*/*/*/*/*\"");
assertEquals(0, results.length());
results.close();
}
/**


@@ -15,13 +15,17 @@ import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.avm.AVMNodeConverter;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.avm.AVMNodeDescriptor;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.CopyService;
import org.alfresco.service.cmr.repository.CrossRepositoryCopyService;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -32,36 +36,36 @@ import org.alfresco.util.Pair;
/**
* Cross Repository Copying.
*
* @author britt
*/
public class CrossRepositoryCopyServiceImpl implements
CrossRepositoryCopyService
public class CrossRepositoryCopyServiceImpl implements CrossRepositoryCopyService
{
/**
* The NodeService reference.
*/
private NodeService fNodeService;
/**
* The FileFolderService reference.
*/
private FileFolderService fFileFolderService;
/**
* The regular CopyService reference.
*/
private CopyService fCopyService;
/**
* The AVMService.
*/
private AVMService fAVMService;
/**
* The ContentService.
*/
private ContentService fContentService;
/**
* The DictionaryService.
*/
@@ -73,48 +77,51 @@ public class CrossRepositoryCopyServiceImpl implements
public CrossRepositoryCopyServiceImpl()
{
}
// Setters for Spring.
public void setAvmService(AVMService service)
{
fAVMService = service;
}
public void setContentService(ContentService service)
{
fContentService = service;
}
public void setCopyService(CopyService service)
{
fCopyService = service;
}
public void setDictionaryService(DictionaryService service)
{
fDictionaryService = service;
}
public void setFileFolderService(FileFolderService service)
{
fFileFolderService = service;
}
public void setNodeService(NodeService service)
{
fNodeService = service;
}
/**
* This copies recursively src, which may be a container or a content type
* to dst, which must be a container. Copied nodes will have the copied from aspect
* applied to them.
* @param src The node to copy.
* @param dst The container to copy it into.
* @param name The name to give the copy.
* This copies recursively src, which may be a container or a content type to dst, which must be a container. Copied
* nodes will have the copied from aspect applied to them.
*
* @param src
* The node to copy.
* @param dst
* The container to copy it into.
* @param name
* The name to give the copy.
*/
public void copy(NodeRef src, NodeRef dst, String name)
public void copy(NodeRef src, NodeRef dst, String name)
{
StoreRef srcStoreRef = src.getStoreRef();
StoreRef dstStoreRef = dst.getStoreRef();
@@ -141,26 +148,33 @@ public class CrossRepositoryCopyServiceImpl implements
}
}
}
/**
* Handle copying from AVM to AVM
* @param src Source node.
* @param dst Destination directory node.
* @param name Name to give copy.
*
* @param src
* Source node.
* @param dst
* Destination directory node.
* @param name
* Name to give copy.
*/
private void copyAVMToAVM(NodeRef src, NodeRef dst, String name)
{
Pair<Integer, String> srcStorePath = AVMNodeConverter.ToAVMVersionPath(src);
Pair<Integer, String> dstStorePath = AVMNodeConverter.ToAVMVersionPath(dst);
fAVMService.copy(srcStorePath.getFirst(), srcStorePath.getSecond(),
dstStorePath.getSecond(), name);
fAVMService.copy(srcStorePath.getFirst(), srcStorePath.getSecond(), dstStorePath.getSecond(), name);
}
/**
* Handle copying from AVM to Repo.
* @param src Source node.
* @param dst Destination Container.
* @param name The name to give the copy.
*
* @param src
* Source node.
* @param dst
* Destination Container.
* @param name
* The name to give the copy.
*/
private void copyAVMToRepo(NodeRef src, NodeRef dst, String name)
{
@@ -171,7 +185,11 @@ public class CrossRepositoryCopyServiceImpl implements
FileInfo newChild = fFileFolderService.create(dst, name, ContentModel.TYPE_CONTENT);
NodeRef childRef = newChild.getNodeRef();
InputStream in = fAVMService.getFileInputStream(desc);
OutputStream out = fContentService.getWriter(childRef, ContentModel.PROP_CONTENT, true).getContentOutputStream();
ContentData cd = fAVMService.getContentDataForRead(desc.getVersionID(), desc.getPath());
ContentWriter writer = fContentService.getWriter(childRef, ContentModel.PROP_CONTENT, true);
writer.setEncoding(cd.getEncoding());
writer.setMimetype(cd.getMimetype());
OutputStream out = writer.getContentOutputStream();
copyData(in, out);
copyPropsAndAspectsAVMToRepo(src, childRef);
}
@@ -188,11 +206,14 @@ public class CrossRepositoryCopyServiceImpl implements
}
}
}
/**
* Helper that copies aspects and properties.
* @param src The source AVM node.
* @param dst The destination Repo node.
*
* @param src
* The source AVM node.
* @param dst
* The destination Repo node.
*/
private void copyPropsAndAspectsAVMToRepo(NodeRef src, NodeRef dst)
{
@@ -217,9 +238,13 @@ public class CrossRepositoryCopyServiceImpl implements
/**
* Handle copying from Repo to AVM.
* @param src The source node.
* @param dst The destination directory.
* @param name The name to give the copy.
*
* @param src
* The source node.
* @param dst
* The destination directory.
* @param name
* The name to give the copy.
*/
private void copyRepoToAVM(NodeRef src, NodeRef dst, String name)
{
@@ -229,8 +254,20 @@ public class CrossRepositoryCopyServiceImpl implements
NodeRef childNodeRef = AVMNodeConverter.ToNodeRef(-1, childPath);
if (fDictionaryService.isSubClass(srcType, ContentModel.TYPE_CONTENT))
{
InputStream in = fContentService.getReader(src, ContentModel.PROP_CONTENT).getContentInputStream();
OutputStream out = fAVMService.createFile(versionPath.getSecond(), name);
ContentReader reader = fContentService.getReader(src, ContentModel.PROP_CONTENT);
InputStream in = reader.getContentInputStream();
try
{
fAVMService.createFile(versionPath.getSecond(), name).close();
}
catch (IOException e)
{
throw new AlfrescoRuntimeException("I/O Error.", e);
}
ContentWriter writer = fAVMService.getContentWriter(childPath);
writer.setEncoding(reader.getEncoding());
writer.setMimetype(reader.getMimetype());
OutputStream out = writer.getContentOutputStream();
copyData(in, out);
copyPropsAndAspectsRepoToAVM(src, childNodeRef, childPath);
return;
@@ -247,12 +284,16 @@ public class CrossRepositoryCopyServiceImpl implements
return;
}
}
/**
* Helper to copy properties and aspects.
* @param src The source node.
* @param dst The destination node.
* @param dstPath The destination AVM path.
*
* @param src
* The source node.
* @param dst
* The destination node.
* @param dstPath
* The destination AVM path.
*/
private void copyPropsAndAspectsRepoToAVM(NodeRef src, NodeRef dst, String dstPath)
{
@@ -269,24 +310,28 @@ public class CrossRepositoryCopyServiceImpl implements
}
fNodeService.setProperty(dst, ContentModel.PROP_COPY_REFERENCE, src);
}
/**
* Handle copying from Repo to Repo.
* @param src The source node.
* @param dst The destination container.
* @param name The name to give the copy.
*
* @param src
* The source node.
* @param dst
* The destination container.
* @param name
* The name to give the copy.
*/
private void copyRepoToRepo(NodeRef src, NodeRef dst, String name)
{
ChildAssociationRef assocRef = fNodeService.getPrimaryParent(src);
fCopyService.copyAndRename(src, dst, ContentModel.ASSOC_CONTAINS, assocRef.getQName(), true);
}
private void copyData(InputStream in, OutputStream out)
{
try
{
byte [] buff = new byte[8192];
byte[] buff = new byte[8192];
int read = 0;
while ((read = in.read(buff)) != -1)
{


@@ -26,7 +26,7 @@ package org.alfresco.repo.node.index;
import java.util.List;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerImpl2;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerImpl;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -100,7 +100,7 @@ public class MissingContentReindexComponent extends AbstractReindexComponent
// search for it in the index, sorting with youngest first
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + AbstractLuceneIndexerImpl2.NOT_INDEXED_CONTENT_MISSING);
sp.setQuery("TEXT:" + AbstractLuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try


@@ -31,7 +31,7 @@ import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerImpl2;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
@@ -134,7 +134,7 @@ public class MissingContentReindexComponentTest extends TestCase
SearchParameters sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + AbstractLuceneIndexerImpl2.NOT_INDEXED_CONTENT_MISSING);
sp.setQuery("TEXT:" + AbstractLuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try


@@ -0,0 +1,142 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search;
import org.alfresco.repo.avm.AVMNodeConverter;
import org.alfresco.repo.search.impl.lucene.AVMLuceneIndexer;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
/**
* Method interceptor for atomic indexing of AVM entries
*
* @author andyh
*
*/
public class AVMSnapShotTriggeredIndexingMethodInterceptor implements MethodInterceptor
{
private AVMService avmService;
private IndexerAndSearcher indexerAndSearcher;
public Object invoke(MethodInvocation mi) throws Throwable
{
if(mi.getMethod().getName().equals("createSnapshot"))
{
String store = (String)mi.getArguments()[0];
int before = avmService.getLatestSnapshotID(store);
Object returnValue = mi.proceed();
int after = avmService.getLatestSnapshotID(store);
StoreRef storeRef = AVMNodeConverter.ToStoreRef(store);
Indexer indexer = indexerAndSearcher.getIndexer(storeRef);
if(indexer instanceof AVMLuceneIndexer)
{
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer)indexer;
avmIndexer.index(store, before, after);
}
return returnValue;
}
// TODO: Purge store
else if(mi.getMethod().getName().equals("purgeStore"))
{
String store = (String)mi.getArguments()[0];
Object returnValue = mi.proceed();
StoreRef storeRef = AVMNodeConverter.ToStoreRef(store);
Indexer indexer = indexerAndSearcher.getIndexer(storeRef);
if(indexer instanceof AVMLuceneIndexer)
{
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer)indexer;
avmIndexer.deleteIndex(store);
}
return returnValue;
}
else if(mi.getMethod().getName().equals("createStore"))
{
String store = (String)mi.getArguments()[0];
Object returnValue = mi.proceed();
StoreRef storeRef = AVMNodeConverter.ToStoreRef(store);
Indexer indexer = indexerAndSearcher.getIndexer(storeRef);
if(indexer instanceof AVMLuceneIndexer)
{
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer)indexer;
avmIndexer.createIndex(store);
}
return returnValue;
}
else if(mi.getMethod().getName().equals("renameStore"))
{
String from = (String)mi.getArguments()[0];
String to = (String)mi.getArguments()[1];
Object returnValue = mi.proceed();
int after = avmService.getLatestSnapshotID(to);
StoreRef fromRef = AVMNodeConverter.ToStoreRef(from);
StoreRef toRef = AVMNodeConverter.ToStoreRef(to);
Indexer indexer = indexerAndSearcher.getIndexer(fromRef);
if(indexer instanceof AVMLuceneIndexer)
{
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer)indexer;
avmIndexer.deleteIndex(from);
}
indexer = indexerAndSearcher.getIndexer(toRef);
if(indexer instanceof AVMLuceneIndexer)
{
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer)indexer;
avmIndexer.createIndex(to);
avmIndexer.index(to, 0, after);
}
return returnValue;
}
else
{
return mi.proceed();
}
}
/**
* Set the AVM service
* @param avmService
*/
public void setAvmService(AVMService avmService)
{
this.avmService = avmService;
}
/**
* Set the AVM indexer and searcher
* @param indexerAndSearcher
*/
public void setIndexerAndSearcher(IndexerAndSearcher indexerAndSearcher)
{
this.indexerAndSearcher = indexerAndSearcher;
}
}
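
The interceptor above is meant to be applied around the AVMService through Spring AOP; the Spring configuration that actually wires it is not included in the hunks shown here. As an illustration only, an equivalent programmatic wiring with Spring's ProxyFactory might look like the following sketch (the class and method names here are assumptions, not part of the commit).

import org.alfresco.repo.search.AVMSnapShotTriggeredIndexingMethodInterceptor;
import org.alfresco.service.cmr.avm.AVMService;
import org.springframework.aop.framework.ProxyFactory;

public class AvmIndexingProxySketch
{
    /**
     * Illustrative only: wrap an AVMService so that createStore, createSnapshot,
     * purgeStore and renameStore keep the Lucene index in step, as the
     * interceptor above does when declared in the Spring configuration.
     */
    public static AVMService wrap(AVMService target, AVMSnapShotTriggeredIndexingMethodInterceptor interceptor)
    {
        ProxyFactory factory = new ProxyFactory(target);
        factory.addAdvice(interceptor);
        return (AVMService) factory.getProxy();
    }
}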


@@ -30,9 +30,8 @@ import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
* Add support for FTS indexing
*
* @author andyh
*
*/
public interface IndexerSPI extends Indexer
public interface BackgroundIndexerAware extends SupportsBackgroundIndexing
{
/**
* Register call back handler when the indexing chunk is done
@@ -49,4 +48,5 @@ public interface IndexerSPI extends Indexer
*/
public int updateFullTextSearch(int i);
}


@@ -42,7 +42,7 @@ public interface IndexerAndSearcher
* @return
* @throws IndexerException
*/
public abstract IndexerSPI getIndexer(StoreRef storeRef) throws IndexerException;
public abstract Indexer getIndexer(StoreRef storeRef) throws IndexerException;
/**
* Get a searcher for a store


@@ -0,0 +1,43 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
/**
* Interface to support background indexing updates
*
* @author andyh
*
*/
public interface SupportsBackgroundIndexing
{
/**
* Set the background indexer manager
*
* @param fullTextSearchIndexer
*/
public void setFullTextSearchIndexer(FullTextSearchIndexer fullTextSearchIndexer);
}


@@ -0,0 +1,52 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search;
/**
* Indexer implementations that work with Spring transactions
* @author andyh
*
*/
public interface TransactionSynchronisationAwareIndexer
{
/**
* Commit
*/
public void commit();
/**
* Rollback
*/
public void rollback();
/**
* Prepare
* @return the return tx state
*/
public int prepare();
/**
* Report whether there are any changes to commit
* @return false if access was read-only (by actual use, not by declaration)
*/
public boolean isModified();
}
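
The code that actually drives these callbacks around a repository transaction is not part of this diff. As a rough sketch under that caveat, an indexer implementing the interface could be hooked into a Spring transaction synchronization along these lines (the class name and the registration mechanism are assumptions).

import org.alfresco.repo.search.TransactionSynchronisationAwareIndexer;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationAdapter;

public class IndexerSynchronizationSketch extends TransactionSynchronizationAdapter
{
    private final TransactionSynchronisationAwareIndexer indexer;

    public IndexerSynchronizationSketch(TransactionSynchronisationAwareIndexer indexer)
    {
        this.indexer = indexer;
    }

    @Override
    public void beforeCommit(boolean readOnly)
    {
        // Prepare only when the indexer actually has changes to write.
        // The returned transaction state is ignored in this sketch.
        if (indexer.isModified())
        {
            indexer.prepare();
        }
    }

    @Override
    public void afterCompletion(int status)
    {
        // A real implementation would track whether prepare succeeded;
        // this sketch only shows the call order.
        if (status == TransactionSynchronization.STATUS_COMMITTED)
        {
            indexer.commit();
        }
        else
        {
            indexer.rollback();
        }
    }
}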


@@ -59,7 +59,12 @@ import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
public class LuceneCategoryTest2 extends TestCase
/**
* Category tests
* @author andyh
*
*/
public class ADMLuceneCategoryTest extends TestCase
{
private ServiceRegistry serviceRegistry;
@@ -104,12 +109,20 @@ public class LuceneCategoryTest2 extends TestCase
private CategoryService categoryService;
public LuceneCategoryTest2()
/**
* Simple test constructor
*
*/
public ADMLuceneCategoryTest()
{
super();
}
public LuceneCategoryTest2(String arg0)
/**
* Named test constructor
* @param arg0
*/
public ADMLuceneCategoryTest(String arg0)
{
super(arg0);
}
@@ -121,7 +134,7 @@ public class LuceneCategoryTest2 extends TestCase
luceneFTS = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
searcher = (SearchService) ctx.getBean("searchService");
indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("admLuceneIndexerAndSearcherFactory");
categoryService = (CategoryService) ctx.getBean("categoryService");
serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
@@ -310,11 +323,11 @@ public class LuceneCategoryTest2 extends TestCase
private void buildBaseIndex()
{
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setNodeService(nodeService);
//indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
//indexer.clearIndex();
indexer.createNode(new ChildAssociationRef(null, null, null, rootNodeRef));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}one"), n1));
@@ -348,7 +361,10 @@ public class LuceneCategoryTest2 extends TestCase
indexer.commit();
}
/**
* Test multiple categories
* @throws Exception
*/
public void testMulti() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
@@ -356,7 +372,7 @@ public class LuceneCategoryTest2 extends TestCase
tx.begin();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -370,6 +386,11 @@ public class LuceneCategoryTest2 extends TestCase
tx.rollback();
}
/**
* Test basic categories.
*
* @throws Exception
*/
public void testBasic() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
@@ -377,7 +398,7 @@ public class LuceneCategoryTest2 extends TestCase
tx.begin();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -533,6 +554,11 @@ public class LuceneCategoryTest2 extends TestCase
tx.rollback();
}
/**
* Test the category service.
*
* @throws Exception
*/
public void testCategoryServiceImpl() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
@@ -540,7 +566,7 @@ public class LuceneCategoryTest2 extends TestCase
tx.begin();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -611,6 +637,10 @@ public class LuceneCategoryTest2 extends TestCase
return nspr;
}
/**
*
* @throws Exception
*/
public void testCategoryService() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
@@ -665,6 +695,10 @@ public class LuceneCategoryTest2 extends TestCase
tx.rollback();
}
/**
*
* @throws Exception
*/
public void xtestManyCategories() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
@@ -718,6 +752,7 @@ public class LuceneCategoryTest2 extends TestCase
NodeRef fiveRef = categoryService.createCategory(fourRef, "child_"+i+"_"+j+"_"+k+"_"+l+"_"+m+"_"+n+"_"+o);
for(int p = 0; p < 5; p++)
{
@SuppressWarnings("unused")
NodeRef sixRef = categoryService.createCategory(fiveRef, "child_"+i+"_"+j+"_"+k+"_"+l+"_"+m+"_"+n+"_"+o+"_"+p);
}
}
@@ -748,12 +783,14 @@ public class LuceneCategoryTest2 extends TestCase
tx = transactionService.getUserTransaction();
tx.begin();
start = System.nanoTime();
@SuppressWarnings("unused")
ResultSet set = searcher.query(serviceRegistry.getPersonService().getPeopleContainer().getStoreRef(), "lucene", "@"+LuceneQueryParser.escape(ContentModel.ASPECT_GEN_CLASSIFIABLE.toString())+":second*");
System.out.println("Query complete in "+(System.nanoTime()-start)/1e9f);
tx.commit();
}
@SuppressWarnings("unused")
private int getTotalScore(ResultSet results)
{
int totalScore = 0;


@@ -0,0 +1,38 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search.impl.lucene;
import org.alfresco.repo.search.BackgroundIndexerAware;
/**
* ADM indexer implementations
*
* @author andyh
*
*/
public interface ADMLuceneIndexer extends LuceneIndexer, BackgroundIndexerAware
{
}


@@ -0,0 +1,125 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search.impl.lucene;
import java.util.List;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.SupportsBackgroundIndexing;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
/**
* Factory for ADM indexers and searchers
* @author andyh
*
*/
public class ADMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAndSearcherFactory implements SupportsBackgroundIndexing
{
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
private NodeService nodeService;
private FullTextSearchIndexer fullTextSearchIndexer;
private ContentService contentService;
/**
* Set the dictionary service
*
* @param dictionaryService
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
/**
* Set the name space service
* @param nameSpaceService
*/
public void setNameSpaceService(NamespaceService nameSpaceService)
{
this.nameSpaceService = nameSpaceService;
}
/**
* Set the node service
* @param nodeService
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
public void setFullTextSearchIndexer(FullTextSearchIndexer fullTextSearchIndexer)
{
this.fullTextSearchIndexer = fullTextSearchIndexer;
}
/**
* Set the content service
* @param contentService
*/
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
protected LuceneIndexer createIndexer(StoreRef storeRef, String deltaId)
{
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(storeRef, deltaId, this);
indexer.setNodeService(nodeService);
indexer.setDictionaryService(dictionaryService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setFullTextSearchIndexer(fullTextSearchIndexer);
indexer.setContentService(contentService);
indexer.setMaxAtomicTransformationTime(getMaxTransformationTime());
return indexer;
}
protected LuceneSearcher getSearcher(StoreRef storeRef, LuceneIndexer indexer) throws SearcherException
{
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(storeRef, indexer, this);
searcher.setNamespacePrefixResolver(nameSpaceService);
// searcher.setLuceneIndexLock(luceneIndexLock);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setQueryRegister(getQueryRegister());
return searcher;
}
protected List<StoreRef> getAllStores()
{
return nodeService.getStores();
}
}


@@ -33,7 +33,6 @@ import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@@ -88,19 +87,19 @@ import org.apache.lucene.search.BooleanClause.Occur;
*
* @author andyh
*/
public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> implements LuceneIndexer2
public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> implements ADMLuceneIndexer
{
private static Logger s_logger = Logger.getLogger(LuceneIndexerImpl2.class);
static Logger s_logger = Logger.getLogger(ADMLuceneIndexerImpl.class);
/**
* The node service we use to get information about nodes
*/
private NodeService nodeService;
NodeService nodeService;
/**
* Content service to get content for indexing.
*/
private ContentService contentService;
ContentService contentService;
/**
* Call back to make after doing non atomic indexing
@@ -120,7 +119,7 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
/**
* Default construction
*/
LuceneIndexerImpl2()
ADMLuceneIndexerImpl()
{
super();
}
@@ -339,14 +338,14 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
* @return - the indexer instance
* @throws LuceneIndexException
*/
public static LuceneIndexerImpl2 getUpdateIndexer(StoreRef storeRef, String deltaId, LuceneConfig config)
public static ADMLuceneIndexerImpl getUpdateIndexer(StoreRef storeRef, String deltaId, LuceneConfig config)
throws LuceneIndexException
{
if (s_logger.isDebugEnabled())
{
s_logger.debug("Creating indexer");
}
LuceneIndexerImpl2 indexer = new LuceneIndexerImpl2();
ADMLuceneIndexerImpl indexer = new ADMLuceneIndexerImpl();
indexer.setLuceneConfig(config);
indexer.initialise(storeRef, deltaId);
return indexer;
@@ -484,17 +483,30 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
private S second;
/**
* Helper class to hold two related objects
* @param first
* @param second
*/
public Pair(F first, S second)
{
this.first = first;
this.second = second;
}
/**
* Get the first
* @return - first
*/
public F getFirst()
{
return first;
}
/**
* Get the second
* @return -second
*/
public S getSecond()
{
return second;
@@ -697,6 +709,299 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
return docs;
}
/**
* @param indexAtomicPropertiesOnly
* true to ignore all properties that must be indexed non-atomically
* @return Returns true if the property was indexed atomically, or false if it should be done asynchronously
*/
protected boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc, boolean indexAtomicPropertiesOnly)
{
String attributeName = "@"
+ QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName()));
boolean store = true;
boolean index = true;
boolean tokenise = true;
boolean atomic = true;
boolean isContent = false;
boolean isMultiLingual = false;
boolean isText = false;
PropertyDefinition propertyDef = getDictionaryService().getProperty(propertyName);
if (propertyDef != null)
{
index = propertyDef.isIndexed();
store = propertyDef.isStoredInIndex();
tokenise = propertyDef.isTokenisedInIndex();
atomic = propertyDef.isIndexedAtomically();
isContent = propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT);
isMultiLingual = propertyDef.getDataType().getName().equals(DataTypeDefinition.MLTEXT);
isText = propertyDef.getDataType().getName().equals(DataTypeDefinition.TEXT);
}
if (value == null)
{
// the value is null
return true;
}
else if (indexAtomicPropertiesOnly && !atomic)
{
// we are only doing atomic properties and the property is definitely non-atomic
return false;
}
if (!indexAtomicPropertiesOnly)
{
doc.removeFields(propertyName.toString());
}
boolean wereAllAtomic = true;
// convert value to String
for (Serializable serializableValue : DefaultTypeConverter.INSTANCE.getCollection(Serializable.class, value))
{
String strValue = null;
try
{
strValue = DefaultTypeConverter.INSTANCE.convert(String.class, serializableValue);
}
catch (TypeConversionException e)
{
doc.add(new Field(attributeName, NOT_INDEXED_NO_TYPE_CONVERSION, Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
continue;
}
if (strValue == null)
{
// nothing to index
continue;
}
if (isContent)
{
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, serializableValue);
if (!index || contentData.getMimetype() == null)
{
// no mimetype or property not indexed
continue;
}
// store mimetype in index - even if content does not index it is useful
// Added size and locale - size needs to be tokenised correctly
doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName + ".size", Long.toString(contentData.getSize()), Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
// TODO: Use the node locale in preference to the system locale
Locale locale = contentData.getLocale();
if (locale == null)
{
Serializable localeProperty = nodeService.getProperty(nodeRef, ContentModel.PROP_LOCALE);
if (localeProperty != null)
{
locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeProperty);
}
}
if (locale == null)
{
locale = Locale.getDefault();
}
doc.add(new Field(attributeName + ".locale", locale.toString().toLowerCase(), Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
ContentReader reader = contentService.getReader(nodeRef, propertyName);
if (reader != null && reader.exists())
{
boolean readerReady = true;
// transform if necessary (it is not a UTF-8 text document)
if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN)
|| !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
{
// get the transformer
ContentTransformer transformer = contentService.getTransformer(reader.getMimetype(),
MimetypeMap.MIMETYPE_TEXT_PLAIN);
// is this transformer good enough?
if (transformer == null)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: No transformation: \n"
+ " source: " + reader + "\n" + " target: "
+ MimetypeMap.MIMETYPE_TEXT_PLAIN);
}
// don't index from the reader
readerReady = false;
// not indexed: no transformation
// doc.add(new Field("TEXT", NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
// Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
else if (indexAtomicPropertiesOnly
&& transformer.getTransformationTime() > maxAtomicTransformationTime)
{
// only indexing atomic properties
// indexing will take too long, so push it to the background
wereAllAtomic = false;
}
else
{
// We have a transformer that is fast enough
ContentWriter writer = contentService.getTempWriter();
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
// this is what the analyzers expect on the stream
writer.setEncoding("UTF-8");
try
{
transformer.transform(reader, writer);
// point the reader to the new-written content
reader = writer.getReader();
}
catch (ContentIOException e)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: Transformation failed", e);
}
// don't index from the reader
readerReady = false;
// not indexed: transformation
// failed
// doc.add(new Field("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
// Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
}
// add the text field using the stream from the
// reader, but only if the reader is valid
if (readerReady)
{
InputStreamReader isr = null;
// InputStream ris = reader.getContentInputStream();
// try
// {
// isr = new InputStreamReader(ris, "UTF-8");
// }
// catch (UnsupportedEncodingException e)
// {
// isr = new InputStreamReader(ris);
// }
// doc.add(new Field("TEXT", isr, Field.TermVector.NO));
InputStream ris = reader.getReader().getContentInputStream();
try
{
isr = new InputStreamReader(ris, "UTF-8");
}
catch (UnsupportedEncodingException e)
{
isr = new InputStreamReader(ris);
}
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000");
StringReader prefix = new StringReader(builder.toString());
Reader multiReader = new MultiReader(prefix, isr);
doc.add(new Field(attributeName, multiReader, Field.TermVector.NO));
}
}
else
// URL not present (null reader) or no content at the URL (file missing)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: Content Missing \n"
+ " node: " + nodeRef + "\n" + " reader: " + reader + "\n" + " content exists: "
+ (reader == null ? " --- " : Boolean.toString(reader.exists())));
}
// not indexed: content missing
doc.add(new Field("TEXT", NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED,
Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_CONTENT_MISSING, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
else
{
Field.Store fieldStore = store ? Field.Store.YES : Field.Store.NO;
Field.Index fieldIndex;
if (index)
{
if (tokenise)
{
fieldIndex = Field.Index.TOKENIZED;
}
else
{
fieldIndex = Field.Index.UN_TOKENIZED;
}
}
else
{
fieldIndex = Field.Index.NO;
}
if ((fieldIndex != Field.Index.NO) || (fieldStore != Field.Store.NO))
{
if (isMultiLingual)
{
MLText mlText = DefaultTypeConverter.INSTANCE.convert(MLText.class, serializableValue);
for (Locale locale : mlText.getLocales())
{
String localeString = mlText.getValue(locale);
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000").append(localeString);
doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
Field.TermVector.NO));
}
}
else if (isText)
{
// Temporary special case for uids and gids
if(propertyName.equals(ContentModel.PROP_USER_USERNAME) || propertyName.equals(ContentModel.PROP_USERNAME) || propertyName.equals(ContentModel.PROP_AUTHORITY_NAME))
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
// TODO: Use the node locale in preference to the system locale
Locale locale = null;
Serializable localeProperty = nodeService.getProperty(nodeRef, ContentModel.PROP_LOCALE);
if (localeProperty != null)
{
locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeProperty);
}
if (locale == null)
{
locale = Locale.getDefault();
}
if (tokenise)
{
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000").append(strValue);
doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
Field.TermVector.NO));
}
else
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
}
else
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
}
}
}
return wereAllAtomic;
}
/**
* Does the node type or any applied aspect allow this node to have child associations?
*
@@ -755,300 +1060,6 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
}
}
/**
* @param indexAtomicPropertiesOnly
* true to ignore all properties that must be indexed non-atomically
* @return Returns true if the property was indexed atomically, or false if it should be done asynchronously
*/
private boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc,
boolean indexAtomicPropertiesOnly)
{
String attributeName = "@"
+ QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName()));
boolean store = true;
boolean index = true;
boolean tokenise = true;
boolean atomic = true;
boolean isContent = false;
boolean isMultiLingual = false;
boolean isText = false;
PropertyDefinition propertyDef = getDictionaryService().getProperty(propertyName);
if (propertyDef != null)
{
index = propertyDef.isIndexed();
store = propertyDef.isStoredInIndex();
tokenise = propertyDef.isTokenisedInIndex();
atomic = propertyDef.isIndexedAtomically();
isContent = propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT);
isMultiLingual = propertyDef.getDataType().getName().equals(DataTypeDefinition.MLTEXT);
isText = propertyDef.getDataType().getName().equals(DataTypeDefinition.TEXT);
}
if (value == null)
{
// the value is null
return true;
}
else if (indexAtomicPropertiesOnly && !atomic)
{
// we are only doing atomic properties and the property is definitely non-atomic
return false;
}
if (!indexAtomicPropertiesOnly)
{
doc.removeFields(propertyName.toString());
}
boolean wereAllAtomic = true;
// convert value to String
for (Serializable serializableValue : DefaultTypeConverter.INSTANCE.getCollection(Serializable.class, value))
{
String strValue = null;
try
{
strValue = DefaultTypeConverter.INSTANCE.convert(String.class, serializableValue);
}
catch (TypeConversionException e)
{
doc.add(new Field(attributeName, NOT_INDEXED_NO_TYPE_CONVERSION, Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
continue;
}
if (strValue == null)
{
// nothing to index
continue;
}
if (isContent)
{
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, serializableValue);
if (!index || contentData.getMimetype() == null)
{
// no mimetype or property not indexed
continue;
}
// store mimetype in index - even if content does not index it is useful
// Added size and locale - size needs to be tokenised correctly
doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName + ".size", Long.toString(contentData.getSize()), Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
// TODO: Use the node locale in preference to the system locale
Locale locale = contentData.getLocale();
if (locale == null)
{
Serializable localeProperty = nodeService.getProperty(nodeRef, ContentModel.PROP_LOCALE);
if (localeProperty != null)
{
locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeProperty);
}
}
if (locale == null)
{
locale = Locale.getDefault();
}
doc.add(new Field(attributeName + ".locale", locale.toString().toLowerCase(), Field.Store.NO,
Field.Index.UN_TOKENIZED, Field.TermVector.NO));
ContentReader reader = contentService.getReader(nodeRef, propertyName);
if (reader != null && reader.exists())
{
boolean readerReady = true;
// transform if necessary (it is not a UTF-8 text document)
if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN)
|| !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
{
// get the transformer
ContentTransformer transformer = contentService.getTransformer(reader.getMimetype(),
MimetypeMap.MIMETYPE_TEXT_PLAIN);
// is this transformer good enough?
if (transformer == null)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: No transformation: \n"
+ " source: " + reader + "\n" + " target: "
+ MimetypeMap.MIMETYPE_TEXT_PLAIN);
}
// don't index from the reader
readerReady = false;
// not indexed: no transformation
// doc.add(new Field("TEXT", NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
// Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
else if (indexAtomicPropertiesOnly
&& transformer.getTransformationTime() > maxAtomicTransformationTime)
{
// only indexing atomic properties
// indexing will take too long, so push it to the background
wereAllAtomic = false;
}
else
{
// We have a transformer that is fast enough
ContentWriter writer = contentService.getTempWriter();
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
// this is what the analyzers expect on the stream
writer.setEncoding("UTF-8");
try
{
transformer.transform(reader, writer);
// point the reader to the new-written content
reader = writer.getReader();
}
catch (ContentIOException e)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: Transformation failed", e);
}
// don't index from the reader
readerReady = false;
// not indexed: transformation
// failed
// doc.add(new Field("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
// Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
}
// add the text field using the stream from the
// reader, but only if the reader is valid
if (readerReady)
{
InputStreamReader isr = null;
// InputStream ris = reader.getContentInputStream();
// try
// {
// isr = new InputStreamReader(ris, "UTF-8");
// }
// catch (UnsupportedEncodingException e)
// {
// isr = new InputStreamReader(ris);
// }
// doc.add(new Field("TEXT", isr, Field.TermVector.NO));
InputStream ris = reader.getReader().getContentInputStream();
try
{
isr = new InputStreamReader(ris, "UTF-8");
}
catch (UnsupportedEncodingException e)
{
isr = new InputStreamReader(ris);
}
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000");
StringReader prefix = new StringReader(builder.toString());
Reader multiReader = new MultiReader(prefix, isr);
doc.add(new Field(attributeName, multiReader, Field.TermVector.NO));
}
}
else
// URL not present (null reader) or no content at the URL (file missing)
{
// log it
if (s_logger.isDebugEnabled())
{
s_logger.debug("Not indexed: Content Missing \n"
+ " node: " + nodeRef + "\n" + " reader: " + reader + "\n" + " content exists: "
+ (reader == null ? " --- " : Boolean.toString(reader.exists())));
}
// not indexed: content missing
doc.add(new Field("TEXT", NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED,
Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_CONTENT_MISSING, Field.Store.NO,
Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
else
{
Field.Store fieldStore = store ? Field.Store.YES : Field.Store.NO;
Field.Index fieldIndex;
if (index)
{
if (tokenise)
{
fieldIndex = Field.Index.TOKENIZED;
}
else
{
fieldIndex = Field.Index.UN_TOKENIZED;
}
}
else
{
fieldIndex = Field.Index.NO;
}
if ((fieldIndex != Field.Index.NO) || (fieldStore != Field.Store.NO))
{
if (isMultiLingual)
{
MLText mlText = DefaultTypeConverter.INSTANCE.convert(MLText.class, serializableValue);
for (Locale locale : mlText.getLocales())
{
String localeString = mlText.getValue(locale);
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000").append(localeString);
doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
Field.TermVector.NO));
}
}
else if (isText)
{
// Temporary special case for uids and gids
if(propertyName.equals(ContentModel.PROP_USER_USERNAME) || propertyName.equals(ContentModel.PROP_USERNAME) || propertyName.equals(ContentModel.PROP_AUTHORITY_NAME))
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
// TODO: Use the node locale in preference to the system locale
Locale locale = null;
Serializable localeProperty = nodeService.getProperty(nodeRef, ContentModel.PROP_LOCALE);
if (localeProperty != null)
{
locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeProperty);
}
if (locale == null)
{
locale = Locale.getDefault();
}
if (tokenise)
{
StringBuilder builder = new StringBuilder();
builder.append("\u0000").append(locale.toString()).append("\u0000").append(strValue);
doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
Field.TermVector.NO));
}
else
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
}
else
{
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
}
}
}
return wereAllAtomic;
}
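// Illustrative sketch (hypothetical helper, not part of this change set): how a caller can use
// the boolean returned above to split work between the atomic pass and the background full text
// search indexer. The helper name and the properties argument are assumptions; only
// indexProperty(...) and fullTextSearchIndexer.requiresIndex(...) come from this class.
private void indexPropertiesAtomically(NodeRef nodeRef, Map<QName, Serializable> properties, Document doc, StoreRef store)
{
    boolean allAtomic = true;
    for (Map.Entry<QName, Serializable> entry : properties.entrySet())
    {
        // First pass: index only what is cheap enough to do in this transaction.
        allAtomic = indexProperty(nodeRef, entry.getKey(), entry.getValue(), doc, true) && allAtomic;
    }
    if (!allAtomic)
    {
        // Anything skipped (for example a slow content transformation) is picked up
        // later by the background FTS indexer.
        fullTextSearchIndexer.requiresIndex(store);
    }
}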
private Map<ChildAssociationRef, Counter> getNodeCounts(NodeRef nodeRef)
{
Map<ChildAssociationRef, Counter> nodeCounts = new HashMap<ChildAssociationRef, Counter>(5);
@@ -1348,23 +1359,13 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
}
}
FullTextSearchIndexer luceneFullTextSearchIndexer;
FullTextSearchIndexer fullTextSearchIndexer;
public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer)
public void setFullTextSearchIndexer(FullTextSearchIndexer fullTextSearchIndexer)
{
this.luceneFullTextSearchIndexer = luceneFullTextSearchIndexer;
this.fullTextSearchIndexer = fullTextSearchIndexer;
}
public boolean getDeleteOnlyNodes()
{
return indexUpdateStatus == IndexUpdateStatus.ASYNCHRONOUS;
}
public Set<String> getDeletions()
{
return Collections.unmodifiableSet(deletions);
}
protected void doPrepare() throws IOException
{
saveDelta();
@@ -1382,7 +1383,7 @@ public class LuceneIndexerImpl2 extends AbstractLuceneIndexerImpl2<NodeRef> impl
else
{
setInfo(docs, getDeletions(), false);
luceneFullTextSearchIndexer.requiresIndex(store);
fullTextSearchIndexer.requiresIndex(store);
}
if (callBack != null)
{

View File

@@ -36,7 +36,6 @@ import java.util.Set;
import org.alfresco.repo.search.CannedQueryDef;
import org.alfresco.repo.search.EmptyResultSet;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.NodeSearcher;
@@ -75,7 +74,7 @@ import com.werken.saxpath.XPathReader;
*
* @author andyh
*/
public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
public class ADMLuceneSearcherImpl extends AbstractLuceneBase implements LuceneSearcher
{
/**
@@ -91,7 +90,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
private QueryRegisterComponent queryRegister;
private LuceneIndexer2 indexer;
private LuceneIndexer indexer;
/*
* Searcher implementation
@@ -101,12 +100,13 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
* Get an initialised searcher for the store and transaction. Normally we do not search against a store and delta. Currently this only gets the searcher against the main index.
*
* @param storeRef
* @param deltaId
* @return
* @param indexer
* @param config
* @return - the searcher implementation
*/
public static LuceneSearcherImpl2 getSearcher(StoreRef storeRef, LuceneIndexer2 indexer, LuceneConfig config)
public static ADMLuceneSearcherImpl getSearcher(StoreRef storeRef, LuceneIndexer indexer, LuceneConfig config)
{
LuceneSearcherImpl2 searcher = new LuceneSearcherImpl2();
ADMLuceneSearcherImpl searcher = new ADMLuceneSearcherImpl();
searcher.setLuceneConfig(config);
try
{
@@ -124,9 +124,10 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
* Get an initialised searcher for the store. No transactional amendments are searched.
*
* @param storeRef
* @return
* @param config
* @return the searcher
*/
public static LuceneSearcherImpl2 getSearcher(StoreRef storeRef, LuceneConfig config)
public static ADMLuceneSearcherImpl getSearcher(StoreRef storeRef, LuceneConfig config)
{
return getSearcher(storeRef, null, config);
}
@@ -152,6 +153,10 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
this.dictionaryService = dictionaryService;
}
/**
* Set the query register
* @param queryRegister
*/
public void setQueryRegister(QueryRegisterComponent queryRegister)
{
this.queryRegister = queryRegister;
@@ -608,8 +613,6 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
/**
* @return Returns true if the pattern is present, otherwise false.
* @see #setIndexer(Indexer)
* @see #setSearcher(SearchService)
*/
public boolean like(NodeRef nodeRef, QName propertyQName, String sqlLikePattern, boolean includeFTS)
{

View File

@@ -95,7 +95,7 @@ import org.springframework.context.ApplicationContext;
* @author andyh
*/
@SuppressWarnings("unused")
public class LuceneTest2 extends TestCase
public class ADMLuceneTest extends TestCase
{
private static final String TEST_NAMESPACE = "http://www.alfresco.org/test/lucenetest";
@@ -104,7 +104,7 @@ public class LuceneTest2 extends TestCase
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private static Log logger = LogFactory.getLog(LuceneTest2.class);
private static Log logger = LogFactory.getLog(ADMLuceneTest.class);
QName createdDate = QName.createQName(TEST_NAMESPACE, "createdDate");
@@ -184,7 +184,10 @@ public class LuceneTest2 extends TestCase
private NamespaceDAOImpl namespaceDao;
public LuceneTest2()
/**
*
*/
public ADMLuceneTest()
{
super();
}
@@ -198,8 +201,8 @@ public class LuceneTest2 extends TestCase
contentService = (ContentService) ctx.getBean("contentService");
queryRegisterComponent = (QueryRegisterComponent) ctx.getBean("queryRegisterComponent");
namespacePrefixResolver = (DictionaryNamespaceComponent) ctx.getBean("namespaceService");
indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
((LuceneIndexerAndSearcherFactory2)indexerAndSearcher).setMaxAtomicTransformationTime(1000000);
indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("admLuceneIndexerAndSearcherFactory");
((AbstractLuceneIndexerAndSearcherFactory)indexerAndSearcher).setMaxAtomicTransformationTime(1000000);
transactionService = (TransactionService) ctx.getBean("transactionComponent");
serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
@@ -412,6 +415,9 @@ public class LuceneTest2 extends TestCase
private int orderIntCount = -45764576;
/**
* @return properties
*/
public Map<QName, Serializable> getOrderProperties()
{
Map<QName, Serializable> testProperties = new HashMap<QName, Serializable>();
@@ -440,11 +446,17 @@ public class LuceneTest2 extends TestCase
super.tearDown();
}
public LuceneTest2(String arg0)
/**
* @param arg0
*/
public ADMLuceneTest(String arg0)
{
super(arg0);
}
/**
* @throws Exception
*/
public void testAuxDataIsPresent() throws Exception
{
luceneFTS.pause();
@@ -454,7 +466,7 @@ public class LuceneTest2 extends TestCase
testTX.begin();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -468,7 +480,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -477,11 +489,17 @@ public class LuceneTest2 extends TestCase
results.close();
}
/**
* @throws Exception
*/
public void testFirst() throws Exception
{
testReadAgainstDelta();
}
/**
* @throws Exception
*/
public void test0() throws Exception
{
luceneFTS.pause();
@@ -491,6 +509,9 @@ public class LuceneTest2 extends TestCase
}
/**
* @throws Exception
*/
public void testDeleteSecondaryAssocToContainer() throws Exception
{
luceneFTS.pause();
@@ -506,7 +527,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -740,6 +761,9 @@ public class LuceneTest2 extends TestCase
}
/**
* @throws Exception
*/
public void testDeleteSecondaryAssocToLeaf() throws Exception
{
luceneFTS.pause();
@@ -755,7 +779,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -979,6 +1003,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testDeleteIssue() throws Exception
{
@@ -990,7 +1017,7 @@ public class LuceneTest2 extends TestCase
.createQName("{namespace}testFind"), testSuperType);
testTX.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -1028,6 +1055,9 @@ public class LuceneTest2 extends TestCase
tx3.commit();
}
/**
* @throws Exception
*/
public void testMTDeleteIssue() throws Exception
{
luceneFTS.pause();
@@ -1041,7 +1071,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -1144,6 +1174,9 @@ public class LuceneTest2 extends TestCase
}
/**
* @throws Exception
*/
public void testDeltaIssue() throws Exception
{
luceneFTS.pause();
@@ -1274,13 +1307,16 @@ public class LuceneTest2 extends TestCase
}
/**
* @throws Exception
*/
public void testRepeatPerformance() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -1317,13 +1353,16 @@ public class LuceneTest2 extends TestCase
}
}
/**
* @throws Exception
*/
public void testSort() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -1666,6 +1705,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test1() throws Exception
{
luceneFTS.pause();
@@ -1674,6 +1716,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test2() throws Exception
{
luceneFTS.pause();
@@ -1682,6 +1727,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test3() throws Exception
{
luceneFTS.pause();
@@ -1690,12 +1738,15 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test4() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setDictionaryService(dictionaryService);
ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene",
@@ -1704,6 +1755,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test5() throws Exception
{
luceneFTS.pause();
@@ -1712,6 +1766,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void test6() throws Exception
{
luceneFTS.pause();
@@ -1720,16 +1777,19 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testNoOp() throws Exception
{
luceneFTS.pause();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_1", indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
indexer.prepare();
@@ -1739,6 +1799,7 @@ public class LuceneTest2 extends TestCase
/**
* Test basic index and search
* @throws Exception
*
* @throws InterruptedException
*/
@@ -1746,14 +1807,14 @@ public class LuceneTest2 extends TestCase
public void testStandAloneIndexerCommit() throws Exception
{
luceneFTS.pause();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_1", indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
// //indexer.clearIndex();
@@ -1770,7 +1831,7 @@ public class LuceneTest2 extends TestCase
indexer.prepare();
indexer.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -1887,12 +1948,15 @@ public class LuceneTest2 extends TestCase
}
}
/**
* @throws Exception
*/
public void testStandAlonePathIndexer() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -1939,13 +2003,13 @@ public class LuceneTest2 extends TestCase
private void buildBaseIndex()
{
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
// indexer.clearIndex();
indexer.createNode(new ChildAssociationRef(null, null, null, rootNodeRef));
@@ -1975,6 +2039,9 @@ public class LuceneTest2 extends TestCase
indexer.commit();
}
/**
* @throws Exception
*/
public void testAllPathSearch() throws Exception
{
luceneFTS.pause();
@@ -1986,7 +2053,7 @@ public class LuceneTest2 extends TestCase
private void runBaseTests()
{
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3133,12 +3200,15 @@ public class LuceneTest2 extends TestCase
}
/**
* @throws Exception
*/
public void testPathSearch() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3185,12 +3255,15 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testXPathSearch() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3212,11 +3285,14 @@ public class LuceneTest2 extends TestCase
results.close();
}
/**
* @throws Exception
*/
public void testMissingIndex() throws Exception
{
luceneFTS.pause();
StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "_missing_");
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(storeRef, indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(storeRef, indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3230,6 +3306,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testUpdateIndex() throws Exception
{
luceneFTS.pause();
@@ -3237,13 +3316,13 @@ public class LuceneTest2 extends TestCase
runBaseTests();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
indexer.updateNode(rootNodeRef);
@@ -3268,6 +3347,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testDeleteLeaf() throws Exception
{
testTX.commit();
@@ -3292,7 +3374,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3507,6 +3589,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testAddEscapedChild() throws Exception
{
String COMPLEX_LOCAL_NAME = "\u0020\u0060\u00ac\u00a6\u0021\"\u00a3\u0024\u0025\u005e\u0026\u002a\u0028\u0029\u002d\u005f\u003d\u002b\t\n\\\u0000\u005b\u005d\u007b\u007d\u003b\u0027\u0023\u003a\u0040\u007e\u002c\u002e\u002f\u003c\u003e\u003f\\u007c\u005f\u0078\u0054\u0036\u0035\u0041\u005f";
@@ -3516,13 +3601,13 @@ public class LuceneTest2 extends TestCase
buildBaseIndex();
runBaseTests();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
ChildAssociationRef car = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName
@@ -3531,7 +3616,7 @@ public class LuceneTest2 extends TestCase
indexer.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3541,6 +3626,9 @@ public class LuceneTest2 extends TestCase
results.close();
}
/**
* @throws Exception
*/
public void testNumericInPath() throws Exception
{
String COMPLEX_LOCAL_NAME = "Woof12";
@@ -3549,13 +3637,13 @@ public class LuceneTest2 extends TestCase
buildBaseIndex();
runBaseTests();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
ChildAssociationRef car = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName
@@ -3564,7 +3652,7 @@ public class LuceneTest2 extends TestCase
indexer.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3574,6 +3662,9 @@ public class LuceneTest2 extends TestCase
results.close();
}
/**
* @throws Exception
*/
public void testDeleteContainer() throws Exception
{
testTX.commit();
@@ -3584,12 +3675,12 @@ public class LuceneTest2 extends TestCase
runBaseTests();
testTX.commit();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
testTX = transactionService.getUserTransaction();
@@ -3604,7 +3695,7 @@ public class LuceneTest2 extends TestCase
testTX = transactionService.getUserTransaction();
testTX.begin();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -3822,19 +3913,22 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testDeleteAndAddReference() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
nodeService.removeChild(n2, n13);
@@ -3843,7 +3937,7 @@ public class LuceneTest2 extends TestCase
indexer.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -4072,13 +4166,13 @@ public class LuceneTest2 extends TestCase
assertEquals(1, results.length());
results.close();
indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis(),
indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis(),
indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
nodeService.addChild(n2, n13, ASSOC_TYPE_QNAME, QName.createQName("{namespace}link"));
@@ -4091,13 +4185,16 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testRenameReference() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -4117,13 +4214,13 @@ public class LuceneTest2 extends TestCase
assertEquals(0, results.length());
results.close();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis(), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
nodeService.removeChild(n2, n13);
@@ -4137,7 +4234,7 @@ public class LuceneTest2 extends TestCase
runBaseTests();
searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
searcher.setDictionaryService(dictionaryService);
@@ -4156,13 +4253,16 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testDelayIndex() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -4187,19 +4287,19 @@ public class LuceneTest2 extends TestCase
// Do index
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
indexer.updateFullTextSearch(1000);
indexer.prepare();
indexer.commit();
searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
searcher.setDictionaryService(dictionaryService);
@@ -4225,13 +4325,16 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testWaitForIndex() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -4256,7 +4359,7 @@ public class LuceneTest2 extends TestCase
// Do index
searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -4267,13 +4370,13 @@ public class LuceneTest2 extends TestCase
assertEquals(1, results.length());
results.close();
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
indexer.updateFullTextSearch(1000);
indexer.prepare();
@@ -4307,13 +4410,16 @@ public class LuceneTest2 extends TestCase
return LuceneQueryParser.escape(qname.toString());
}
/**
* @throws Exception
*/
public void testForKev() throws Exception
{
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
@@ -4338,6 +4444,9 @@ public class LuceneTest2 extends TestCase
luceneFTS.resume();
}
/**
* @throws Exception
*/
public void testIssueAR47() throws Exception
{
// This bug arose from repeated deletes and adds creating empty index
@@ -4357,7 +4466,7 @@ public class LuceneTest2 extends TestCase
.createQName("{namespace}testFind"), testSuperType);
tx.commit();
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
@@ -4389,6 +4498,9 @@ public class LuceneTest2 extends TestCase
tx3.commit();
}
/**
* @throws Exception
*/
public void testReadAgainstDelta() throws Exception
{
testTX.commit();
@@ -4641,13 +4753,13 @@ public class LuceneTest2 extends TestCase
private void runPerformanceTest(double time, boolean clear)
{
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
ADMLuceneIndexerImpl indexer = ADMLuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setMaxAtomicTransformationTime(1000000);
indexer.setNodeService(nodeService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
if (clear)
{
@@ -4692,9 +4804,13 @@ public class LuceneTest2 extends TestCase
return nspr;
}
/**
* @param args
* @throws Exception
*/
public static void main(String[] args) throws Exception
{
LuceneTest2 test = new LuceneTest2();
ADMLuceneTest test = new ADMLuceneTest();
test.setUp();
// test.testForKev();
// test.testDeleteContainer();
@@ -4715,6 +4831,10 @@ public class LuceneTest2 extends TestCase
// test.dictionaryService.getType(test.nodeService.getType(test.rootNodeRef)).getDefaultAspects();
}
/**
* @author andyh
*
*/
public static class UnknownDataType implements Serializable
{

View File

@@ -0,0 +1,58 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search.impl.lucene;
/**
* AVM specific indexer support
*
* @author andyh
*
*/
public interface AVMLuceneIndexer extends LuceneIndexer
{
/**
* Index a specified change to a store between two snapshots
*
* @param store - the name of the store
* @param srcVersion - the id of the snapshot before the changeset
* @param dstVersion - the id of the snapshot created by the change set
*/
public void index(String store, int srcVersion, int dstVersion);
/**
* Delete the index for the specified store.
*
* @param store
*/
public void deleteIndex(String store);
/**
* Create an index for the specified store.
* This makes sure that the root node for the store is indexed correctly.
*
* @param store
*/
public void createIndex(String store);
}
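A hedged usage sketch of the interface above: the store name and snapshot ids are illustrative, and the cast of the factory-provided indexer to AVMLuceneIndexer is an assumption rather than something shown in this change set.
StoreRef avmStoreRef = AVMNodeConverter.ToStoreRef("main");
AVMLuceneIndexer avmIndexer = (AVMLuceneIndexer) indexerAndSearcher.getIndexer(avmStoreRef); // cast assumed
avmIndexer.createIndex("main");                   // make sure the store root is indexed
int srcVersion = 0;                               // snapshot before the change set (illustrative)
int dstVersion = 1;                               // snapshot created by the change set (illustrative)
avmIndexer.index("main", srcVersion, dstVersion); // index exactly the delta between the two snapshots
// avmIndexer.deleteIndex("main");                // drop the whole index for the store if required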

View File

@@ -0,0 +1,168 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search.impl.lucene;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.avm.AVMNodeConverter;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
import org.alfresco.service.cmr.avmsync.AVMSyncService;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
/**
* Factory for AVM indexers and searchers
*
* @author andyh
*
*/
public class AVMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAndSearcherFactory
{
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
private ContentService contentService;
private AVMService avmService;
private AVMSyncService avmSyncService;
private NodeService nodeService;
private ContentStore contentStore;
/**
* Set the dictionary service
* @param dictionaryService
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
/**
* Set the name space service
* @param nameSpaceService
*/
public void setNameSpaceService(NamespaceService nameSpaceService)
{
this.nameSpaceService = nameSpaceService;
}
/**
* Set the content service
* @param contentService
*/
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
/**
* Set the AVM service
* @param avmService
*/
public void setAvmService(AVMService avmService)
{
this.avmService = avmService;
}
/**
* Set the AVM sync service
* @param avmSyncService
*/
public void setAvmSyncService(AVMSyncService avmSyncService)
{
this.avmSyncService = avmSyncService;
}
/**
* Set the node service
* @param nodeService
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Set the content store
* @param contentStore
*/
public void setContentStore(ContentStore contentStore)
{
this.contentStore = contentStore;
}
@Override
protected LuceneIndexer createIndexer(StoreRef storeRef, String deltaId)
{
AVMLuceneIndexerImpl indexer = AVMLuceneIndexerImpl.getUpdateIndexer(storeRef, deltaId, this);
indexer.setDictionaryService(dictionaryService);
indexer.setContentService(contentService);
indexer.setMaxAtomicTransformationTime(getMaxTransformationTime());
indexer.setAvmService(avmService);
indexer.setAvmSyncService(avmSyncService);
indexer.setContentStore(contentStore);
return indexer;
}
@Override
protected List<StoreRef> getAllStores()
{
List<AVMStoreDescriptor> stores = avmService.getStores();
List<StoreRef> storeRefs = new ArrayList<StoreRef>(stores.size());
for(AVMStoreDescriptor storeDesc : stores)
{
StoreRef storeRef = AVMNodeConverter.ToStoreRef(storeDesc.getName());
storeRefs.add(storeRef);
}
return storeRefs;
}
@Override
protected LuceneSearcher getSearcher(StoreRef storeRef, LuceneIndexer indexer) throws SearcherException
{
//TODO: Store overlays
ADMLuceneSearcherImpl searcher = ADMLuceneSearcherImpl.getSearcher(storeRef, indexer, this);
searcher.setNamespacePrefixResolver(nameSpaceService);
// searcher.setLuceneIndexLock(luceneIndexLock);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setQueryRegister(getQueryRegister());
return searcher;
}
}
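For illustration only, a programmatic equivalent of the Spring wiring, using just the setters shown on this class and the abstract factory; the injected service variables are assumed to be available from the application context.
// Illustrative wiring; in the product these dependencies are injected by Spring.
AVMLuceneIndexerAndSearcherFactory factory = new AVMLuceneIndexerAndSearcherFactory();
factory.setDictionaryService(dictionaryService);
factory.setNameSpaceService(namespaceService);
factory.setContentService(contentService);
factory.setAvmService(avmService);
factory.setAvmSyncService(avmSyncService);
factory.setNodeService(nodeService);
factory.setContentStore(contentStore);
factory.setQueryRegister(queryRegister);               // inherited from the abstract factory
factory.setIndexRootLocation(indexRootLocation);       // inherited from the abstract factory
factory.setMaxAtomicTransformationTime(20);            // the documented 20ms default

LuceneIndexer indexer = factory.getIndexer(AVMNodeConverter.ToStoreRef("main"));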

File diff suppressed because it is too large Load Diff

View File

@@ -33,7 +33,6 @@ import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.apache.log4j.Logger;
import org.apache.lucene.index.IndexReader;
@@ -64,9 +63,9 @@ import org.apache.lucene.search.IndexSearcher;
*
*/
public abstract class LuceneBase2
public abstract class AbstractLuceneBase
{
private static Logger s_logger = Logger.getLogger(LuceneBase2.class);
private static Logger s_logger = Logger.getLogger(AbstractLuceneBase.class);
private IndexInfo indexInfo;
@@ -115,13 +114,12 @@ public abstract class LuceneBase2
{
throw new IndexerException("Filed to set delta as active");
}
}
/**
* Utility method to find the path to the base index
*
* @return
* @return - the base path
*/
private String getBasePath()
{
@@ -137,7 +135,7 @@ public abstract class LuceneBase2
/**
* Get a searcher for the main index. TODO: Split out support for the main index - we really only need this if we want to search over the changing index before it is committed
*
* @return
* @return - the searcher
* @throws IOException
*/
@@ -154,7 +152,7 @@ public abstract class LuceneBase2
}
}
protected ClosingIndexSearcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException
protected ClosingIndexSearcher getSearcher(LuceneIndexer luceneIndexer) throws LuceneIndexException
{
// If we know the delta id we should do better
@@ -185,7 +183,7 @@ public abstract class LuceneBase2
/**
* Get a reader for the on file portion of the delta
*
* @return
* @return - the index reader
* @throws IOException
* @throws IOException
*/
@@ -211,7 +209,7 @@ public abstract class LuceneBase2
/**
* Get the on file writer for the delta
*
* @return
* @return - the writer for the delta
* @throws IOException
* @throws IOException
*/
@@ -273,32 +271,63 @@ public abstract class LuceneBase2
return indexInfo.getMainIndexReferenceCountingReadOnlyIndexReader();
}
/**
* Set the dictionary service
* @param dictionaryService
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
/**
* Get the dictionary service.
*
* @return - the service
*/
public DictionaryService getDictionaryService()
{
return dictionaryService;
}
/**
* Set the lucene configuration options
*
* @param config
*/
public void setLuceneConfig(LuceneConfig config)
{
this.config = config;
}
/**
* Get the lucene configuration options.
*
* @return - the config options object.
*/
public LuceneConfig getLuceneConfig()
{
return config;
}
/**
* Get the ID for the delta we are working with.
*
* @return - the id
*/
public String getDeltaId()
{
return deltaId;
}
/**
* Execute actions while holding the write lock on the index
*
* @param <R>
* @param lockWork
* @return - the result returned by the action.
*/
public <R> R doWithWriteLock(LockWork<R> lockWork)
{
return indexInfo.doWithWriteLock(lockWork);
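// Illustrative only (editor's sketch): a caller runs index maintenance while holding the
// write lock, assuming LockWork is the usual single-method callback defined on IndexInfo:
//
//     Boolean done = doWithWriteLock(new LockWork<Boolean>()
//     {
//         public Boolean doWork() throws Exception
//         {
//             // mutate the index safely here
//             return Boolean.TRUE;
//         }
//     });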

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
@@ -41,18 +42,14 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.MLAnalysisMode;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.transaction.SimpleTransaction;
import org.alfresco.repo.search.transaction.SimpleTransactionManager;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.GUID;
import org.apache.commons.io.FileUtils;
@@ -73,13 +70,9 @@ import org.quartz.JobExecutionException;
* @author andyh
*/
public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearcher, XAResource
public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher, XAResource
{
private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory2.class);
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
private static Log logger = LogFactory.getLog(AbstractLuceneIndexerAndSearcherFactory.class);
private int queryMaxClauses;
@@ -90,18 +83,18 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
* indexer for each store within a transaction
*/
private static Map<Xid, Map<StoreRef, LuceneIndexer2>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer2>>();
private static Map<Xid, Map<StoreRef, LuceneIndexer>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer>>();
/**
* Suspended global transactions.
*/
private static Map<Xid, Map<StoreRef, LuceneIndexer2>> suspendedIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer2>>();
private static Map<Xid, Map<StoreRef, LuceneIndexer>> suspendedIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer>>();
/**
* Thread local indexers - used outside a global transaction
*/
private static ThreadLocal<Map<StoreRef, LuceneIndexer2>> threadLocalIndexers = new ThreadLocal<Map<StoreRef, LuceneIndexer2>>();
private static ThreadLocal<Map<StoreRef, LuceneIndexer>> threadLocalIndexers = new ThreadLocal<Map<StoreRef, LuceneIndexer>>();
/**
* The default timeout for transactions. TODO: Respect this
@@ -118,14 +111,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
* The node service we use to get information about nodes
*/
private NodeService nodeService;
private FullTextSearchIndexer luceneFullTextSearchIndexer;
private String indexRootLocation;
private ContentService contentService;
private QueryRegisterComponent queryRegister;
/** the maximum transformation time to allow atomically, defaulting to 20ms */
@@ -147,47 +134,42 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
* Default constructor. TODO: Fit in with IOC
*/
public LuceneIndexerAndSearcherFactory2()
public AbstractLuceneIndexerAndSearcherFactory()
{
super();
}
/**
* Setter for the node service via IOC. Used in the Spring container
*
* @param nodeService
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
public void setNameSpaceService(NamespaceService nameSpaceService)
{
this.nameSpaceService = nameSpaceService;
}
public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer)
{
this.luceneFullTextSearchIndexer = luceneFullTextSearchIndexer;
}
/**
* Set the directory that contains the indexes
*
* @param indexRootLocation
*/
public void setIndexRootLocation(String indexRootLocation)
{
this.indexRootLocation = indexRootLocation;
}
/**
* Set the query register
*
* @param queryRegister
*/
public void setQueryRegister(QueryRegisterComponent queryRegister)
{
this.queryRegister = queryRegister;
}
/**
* Get the query register.
*
* @return - the query register.
*/
public QueryRegisterComponent getQueryRegister()
{
return queryRegister;
}
/**
* Set the maximum average transformation time allowed to a transformer in order to have the transformation
* performed in the current transaction. The default is 20ms.
@@ -200,10 +182,20 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
this.maxAtomicTransformationTime = maxAtomicTransformationTime;
}
/**
* Get the max time for an atomic transform
*
* @return - milliseconds as a long
*/
public long getMaxTransformationTime()
{
return maxAtomicTransformationTime;
}
/**
* Check if we are in a global transaction according to the transaction manager
*
* @return
* @return - true if in a global transaction
*/
private boolean inGlobalTransaction()
@@ -221,7 +213,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Get the local transaction - may be null if we are outside a transaction.
*
* @return
* @return - the transaction
* @throws IndexerException
*/
private SimpleTransaction getTransaction() throws IndexerException
@@ -242,7 +234,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
* @param storeRef -
* the id of the store
*/
public LuceneIndexer2 getIndexer(StoreRef storeRef) throws IndexerException
public LuceneIndexer getIndexer(StoreRef storeRef) throws IndexerException
{
// register to receive txn callbacks
// TODO: make this conditional on whether the XA stuff is being used
@@ -253,14 +245,14 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
SimpleTransaction tx = getTransaction();
// Only find indexers in the active list
Map<StoreRef, LuceneIndexer2> indexers = activeIndexersInGlobalTx.get(tx);
Map<StoreRef, LuceneIndexer> indexers = activeIndexersInGlobalTx.get(tx);
if (indexers == null)
{
if (suspendedIndexersInGlobalTx.containsKey(tx))
{
throw new IndexerException("Trying to obtain an index for a suspended transaction.");
}
indexers = new HashMap<StoreRef, LuceneIndexer2>();
indexers = new HashMap<StoreRef, LuceneIndexer>();
activeIndexersInGlobalTx.put(tx, indexers);
try
{
@@ -280,7 +272,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
throw new IndexerException("", e);
}
}
LuceneIndexer2 indexer = indexers.get(storeRef);
LuceneIndexer indexer = indexers.get(storeRef);
if (indexer == null)
{
indexer = createIndexer(storeRef, getTransactionId(tx, storeRef));
@@ -296,15 +288,15 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
private LuceneIndexer2 getThreadLocalIndexer(StoreRef storeRef)
private LuceneIndexer getThreadLocalIndexer(StoreRef storeRef)
{
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers == null)
{
indexers = new HashMap<StoreRef, LuceneIndexer2>();
indexers = new HashMap<StoreRef, LuceneIndexer>();
threadLocalIndexers.set(indexers);
}
LuceneIndexer2 indexer = indexers.get(storeRef);
LuceneIndexer indexer = indexers.get(storeRef);
if (indexer == null)
{
indexer = createIndexer(storeRef, GUID.generate());
@@ -317,7 +309,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
* Get the transaction identifier used to store it in the transaction map.
*
* @param tx
* @return
* @return - the transaction id
*/
private static String getTransactionId(Transaction tx, StoreRef storeRef)
{
@@ -328,10 +320,10 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
else
{
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers != null)
{
LuceneIndexer2 indexer = indexers.get(storeRef);
LuceneIndexer indexer = indexers.get(storeRef);
if (indexer != null)
{
return indexer.getDeltaId();
@@ -346,27 +338,17 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
*
* @param storeRef
* @param deltaId
* @return
* @return - the indexer made by the concrete implementation
*/
private LuceneIndexerImpl2 createIndexer(StoreRef storeRef, String deltaId)
{
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(storeRef, deltaId, this);
indexer.setNodeService(nodeService);
indexer.setDictionaryService(dictionaryService);
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setLuceneFullTextSearchIndexer(luceneFullTextSearchIndexer);
indexer.setContentService(contentService);
indexer.setMaxAtomicTransformationTime(maxAtomicTransformationTime);
return indexer;
}
protected abstract LuceneIndexer createIndexer(StoreRef storeRef, String deltaId);
/**
* Encapsulate creating a searcher over the main index
*/
public LuceneSearcher2 getSearcher(StoreRef storeRef, boolean searchDelta) throws SearcherException
public LuceneSearcher getSearcher(StoreRef storeRef, boolean searchDelta) throws SearcherException
{
String deltaId = null;
LuceneIndexer2 indexer = null;
LuceneIndexer indexer = null;
if (searchDelta)
{
deltaId = getTransactionId(getTransaction(), storeRef);
@@ -375,7 +357,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
indexer = getIndexer(storeRef);
}
}
LuceneSearcher2 searcher = getSearcher(storeRef, indexer);
LuceneSearcher searcher = getSearcher(storeRef, indexer);
return searcher;
}
@@ -384,19 +366,11 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
*
* @param storeRef
* @param deltaId
* @return
* @return - the searcher made by the concrete implementation.
* @throws SearcherException
*/
private LuceneSearcher2 getSearcher(StoreRef storeRef, LuceneIndexer2 indexer) throws SearcherException
{
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(storeRef, indexer, this);
searcher.setNamespacePrefixResolver(nameSpaceService);
// searcher.setLuceneIndexLock(luceneIndexLock);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setQueryRegister(queryRegister);
return searcher;
}
protected abstract LuceneSearcher getSearcher(StoreRef storeRef, LuceneIndexer indexer) throws SearcherException;
/*
* XAResource implementation
@@ -408,7 +382,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
// TODO: Should be remembering overall state
// TODO: Keep track of prepare responses
Map<StoreRef, LuceneIndexer2> indexers = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> indexers = activeIndexersInGlobalTx.get(xid);
if (indexers == null)
{
if (suspendedIndexersInGlobalTx.containsKey(xid))
@@ -430,7 +404,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
else if (indexers.size() == 1)
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
indexer.commit();
}
@@ -444,7 +418,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
else
// two phase
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
indexer.commit();
}
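Reviewer note: the branches above separate the single-indexer case from the multi-indexer ("two phase") case. A generic, illustrative sketch of that coordination pattern follows; the interface and class names are made up, and this is not the factory's actual commit(Xid, boolean) code, just the usual one-phase shortcut versus a prepare-then-commit round.

import java.util.Collection;

interface TwoPhaseParticipant
{
    void prepare();   // vote; throw to force a rollback
    void commit();
}

public class CommitCoordinatorSketch
{
    public static void commit(Collection<TwoPhaseParticipant> participants, boolean onePhase)
    {
        if (onePhase && participants.size() == 1)
        {
            // Single participant: skip the voting round and commit directly.
            participants.iterator().next().commit();
            return;
        }
        // Otherwise every participant must vote before any of them commits.
        for (TwoPhaseParticipant participant : participants)
        {
            participant.prepare();
        }
        for (TwoPhaseParticipant participant : participants)
        {
            participant.commit();
        }
    }
}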
@@ -459,7 +433,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
public void end(Xid xid, int flag) throws XAException
{
Map<StoreRef, LuceneIndexer2> indexers = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> indexers = activeIndexersInGlobalTx.get(xid);
if (indexers == null)
{
if (suspendedIndexersInGlobalTx.containsKey(xid))
@@ -501,13 +475,13 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
public boolean isSameRM(XAResource xar) throws XAException
{
return (xar instanceof LuceneIndexerAndSearcherFactory2);
return (xar instanceof AbstractLuceneIndexerAndSearcherFactory);
}
public int prepare(Xid xid) throws XAException
{
// TODO: Track state OK, ReadOnly, Exception (=> rolled back?)
Map<StoreRef, LuceneIndexer2> indexers = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> indexers = activeIndexersInGlobalTx.get(xid);
if (indexers == null)
{
if (suspendedIndexersInGlobalTx.containsKey(xid))
@@ -522,7 +496,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
boolean isPrepared = true;
boolean isModified = false;
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
try
{
@@ -567,7 +541,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
// TODO: What to do if all do not roll back?
try
{
Map<StoreRef, LuceneIndexer2> indexers = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> indexers = activeIndexersInGlobalTx.get(xid);
if (indexers == null)
{
if (suspendedIndexersInGlobalTx.containsKey(xid))
@@ -580,7 +554,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
return;
}
}
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
indexer.rollback();
}
@@ -599,8 +573,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
public void start(Xid xid, int flag) throws XAException
{
Map<StoreRef, LuceneIndexer2> active = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer2> suspended = suspendedIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> active = activeIndexersInGlobalTx.get(xid);
Map<StoreRef, LuceneIndexer> suspended = suspendedIndexersInGlobalTx.get(xid);
if (flag == XAResource.TMJOIN)
{
// must be active
@@ -659,10 +633,10 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
try
{
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers != null)
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
try
{
@@ -689,16 +663,16 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Prepare the transaction. TODO: Store prepare results
*
* @return
* @return - the tx code
*/
public int prepare() throws IndexerException
{
boolean isPrepared = true;
boolean isModified = false;
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers != null)
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
try
{
@@ -734,11 +708,11 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
*/
public void rollback()
{
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers != null)
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
try
{
@@ -762,22 +736,17 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
public void flush()
{
// TODO: Needs fixing if we expose the indexer in JTA
Map<StoreRef, LuceneIndexer2> indexers = threadLocalIndexers.get();
Map<StoreRef, LuceneIndexer> indexers = threadLocalIndexers.get();
if (indexers != null)
{
for (LuceneIndexer2 indexer : indexers.values())
for (LuceneIndexer indexer : indexers.values())
{
indexer.flushPending();
}
}
}
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
public String getIndexRootLocation()
{
return indexRootLocation;
@@ -788,11 +757,21 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
return indexerBatchSize;
}
/**
* Set the batch size to use for background indexing
*
* @param indexerBatchSize
*/
public void setIndexerBatchSize(int indexerBatchSize)
{
this.indexerBatchSize = indexerBatchSize;
}
/**
* Get the directory where any lock files are written (by default there are none)
*
* @return - the path to the directory
*/
public String getLockDirectory()
{
return lockDirectory;
@@ -834,42 +813,76 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
return queryMaxClauses;
}
/**
* Set the maximum number of clauses allowed in a Lucene boolean query
*
* @param queryMaxClauses
*/
public void setQueryMaxClauses(int queryMaxClauses)
{
this.queryMaxClauses = queryMaxClauses;
BooleanQuery.setMaxClauseCount(this.queryMaxClauses);
}
/**
* Set the lucene write lock timeout
* @param timeout
*/
public void setWriteLockTimeout(long timeout)
{
this.writeLockTimeout = timeout;
}
/**
* Set the lucene commit lock timeout (no longer used with lucene 2.1)
* @param timeout
*/
public void setCommitLockTimeout(long timeout)
{
this.commitLockTimeout = timeout;
}
/**
* Get the commit lock timeout.
* @return - the timeout
*/
public long getCommitLockTimeout()
{
return commitLockTimeout;
}
/**
* Get the write lock timeout
* @return - the timeout in ms
*/
public long getWriteLockTimeout()
{
return writeLockTimeout;
}
/**
* Set the lock poll interval in ms
*
* @param time
*/
public void setLockPollInterval(long time)
{
Lock.LOCK_POLL_INTERVAL = time;
}
/**
* Get the max number of tokens in the field
* @return - the max tokens considered.
*/
public int getIndexerMaxFieldLength()
{
return indexerMaxFieldLength;
}
/**
* Set the max field length.
* @param indexerMaxFieldLength
*/
public void setIndexerMaxFieldLength(int indexerMaxFieldLength)
{
this.indexerMaxFieldLength = indexerMaxFieldLength;
@@ -889,12 +902,17 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
private TransactionService transactionService;
private LuceneIndexerAndSearcher factory;
private Set<LuceneIndexerAndSearcher> factories;
@SuppressWarnings("unused")
private NodeService nodeService;
private String targetLocation;
/**
* Default constructor
*
*/
public LuceneIndexBackupComponent()
{
}
@@ -912,12 +930,12 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
/**
* Set the Lucene index factory that will be used to control the index locks
*
* @param factory
* the index factory
* @param factories
* the index factories
*/
public void setFactory(LuceneIndexerAndSearcher factory)
public void setFactories(Set<LuceneIndexerAndSearcher> factories)
{
this.factory = factory;
this.factories = factories;
}
/**
@@ -961,83 +979,101 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
private void backupImpl()
{
// create the location to copy to
final File targetDir = new File(targetLocation);
File targetDir = new File(targetLocation);
if (targetDir.exists() && !targetDir.isDirectory())
{
throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
}
final File targetParentDir = targetDir.getParentFile();
File targetParentDir = targetDir.getParentFile();
if (targetParentDir == null)
{
throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
}
final File tempDir = new File(targetParentDir, "indexbackup_temp");
File tempDir = new File(targetParentDir, "indexbackup_temp");
factory.doWithAllWriteLocks(new WithAllWriteLocksWork<Object>()
for (LuceneIndexerAndSearcher factory : factories)
{
public Object doWork()
WithAllWriteLocksWork<Object> backupWork = new BackUpWithAllWriteLocksWork(factory, tempDir, targetDir);
factory.doWithAllWriteLocks(backupWork);
if (logger.isDebugEnabled())
{
try
{
File indexRootDir = new File(factory.getIndexRootLocation());
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
return null;
}
catch (Throwable e)
{
throw new AlfrescoRuntimeException(
"Failed to copy Lucene index root: \n"
+ " Index root: " + factory.getIndexRootLocation() + "\n" + " Target: "
+ targetDir, e);
}
logger.debug("Backed up Lucene indexes: \n" + " Target directory: " + targetDir);
}
});
if (logger.isDebugEnabled())
{
logger.debug("Backed up Lucene indexes: \n" + " Target directory: " + targetDir);
}
}
/**
* Makes a backup of the source directory via a temporary folder
*/
private void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception
static class BackUpWithAllWriteLocksWork implements WithAllWriteLocksWork<Object>
{
if (!sourceDir.exists())
LuceneIndexerAndSearcher factory;
File tempDir;
File targetDir;
BackUpWithAllWriteLocksWork(LuceneIndexerAndSearcher factory, File tempDir, File targetDir)
{
// there is nothing to copy
return;
this.factory = factory;
this.tempDir = tempDir;
this.targetDir = targetDir;
}
// delete the files from the temp directory
if (tempDir.exists())
public Object doWork()
{
FileUtils.deleteDirectory(tempDir);
if (tempDir.exists())
try
{
throw new AlfrescoRuntimeException("Temp directory exists and cannot be deleted: " + tempDir);
File indexRootDir = new File(factory.getIndexRootLocation());
// perform the copy
backupDirectory(indexRootDir, tempDir, targetDir);
return null;
}
catch (Throwable e)
{
throw new AlfrescoRuntimeException("Failed to copy Lucene index root: \n"
+ " Index root: " + factory.getIndexRootLocation() + "\n" + " Target: " + targetDir, e);
}
}
// copy to the temp directory
FileUtils.copyDirectory(sourceDir, tempDir, true);
// check that the temp directory was created
if (!tempDir.exists())
/**
* Makes a backup of the source directory via a temporary folder
*/
private static void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception
{
throw new AlfrescoRuntimeException("Copy to temp location failed");
}
// delete the target directory
FileUtils.deleteDirectory(targetDir);
if (targetDir.exists())
{
throw new AlfrescoRuntimeException("Failed to delete older files from target location");
}
// rename the temp to be the target
tempDir.renameTo(targetDir);
// make sure the rename worked
if (!targetDir.exists())
{
throw new AlfrescoRuntimeException("Failed to rename temporary directory to target backup directory");
if (!sourceDir.exists())
{
// there is nothing to copy
return;
}
// delete the files from the temp directory
if (tempDir.exists())
{
FileUtils.deleteDirectory(tempDir);
if (tempDir.exists())
{
throw new AlfrescoRuntimeException("Temp directory exists and cannot be deleted: " + tempDir);
}
}
// copy to the temp directory
FileUtils.copyDirectory(sourceDir, tempDir, true);
// check that the temp directory was created
if (!tempDir.exists())
{
throw new AlfrescoRuntimeException("Copy to temp location failed");
}
// delete the target directory
FileUtils.deleteDirectory(targetDir);
if (targetDir.exists())
{
throw new AlfrescoRuntimeException("Failed to delete older files from target location");
}
// rename the temp to be the target
tempDir.renameTo(targetDir);
// make sure the rename worked
if (!targetDir.exists())
{
throw new AlfrescoRuntimeException(
"Failed to rename temporary directory to target backup directory");
}
}
}
}
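Reviewer note: the backup logic above (now wrapped per factory in BackUpWithAllWriteLocksWork) keeps the existing backup intact until a complete copy exists - copy into a sibling temp directory, delete the old target, then rename the temp directory into place. A condensed, illustrative sketch of that sequence using the same commons-io calls; the names here are placeholders, not the component's configuration.

import java.io.File;
import org.apache.commons.io.FileUtils;

public class BackupViaTempDirSketch
{
    public static void backup(File sourceDir, File targetDir) throws Exception
    {
        if (!sourceDir.exists())
        {
            return; // nothing to copy
        }
        File tempDir = new File(targetDir.getParentFile(), "indexbackup_temp");

        // Work in a sibling temp directory so a failed copy never damages the old backup.
        FileUtils.deleteDirectory(tempDir);
        FileUtils.copyDirectory(sourceDir, tempDir, true);

        // Only once the copy is complete is the previous backup replaced.
        FileUtils.deleteDirectory(targetDir);
        if (!tempDir.renameTo(targetDir))
        {
            throw new IllegalStateException("Could not rename " + tempDir + " to " + targetDir);
        }
    }
}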
@@ -1069,10 +1105,43 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
}
public MLAnalysisMode getDefaultMLIndexAnalysisMode()
{
return defaultMLIndexAnalysisMode;
}
/**
* Set the ML analysis mode at index time.
*
* @param mode
*/
public void setDefaultMLIndexAnalysisMode(MLAnalysisMode mode)
{
// defaultMLIndexAnalysisMode = MLAnalysisMode.getMLAnalysisMode(mode);
defaultMLIndexAnalysisMode = mode;
}
public MLAnalysisMode getDefaultMLSearchAnalysisMode()
{
return defaultMLSearchAnalysisMode;
}
/**
* Set the ML analysis mode at search time
* @param mode
*/
public void setDefaultMLSearchAnalysisMode(MLAnalysisMode mode)
{
// defaultMLSearchAnalysisMode = MLAnalysisMode.getMLAnalysisMode(mode);
defaultMLSearchAnalysisMode = mode;
}
protected abstract List<StoreRef> getAllStores();
public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
{
// get all the available stores
List<StoreRef> storeRefs = nodeService.getStores();
List<StoreRef> storeRefs = getAllStores();
IndexInfo.LockWork<R> currentLockWork = null;
@@ -1119,9 +1188,9 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
IndexInfo.LockWork<R> lockWork;
LuceneIndexer2 indexer;
LuceneIndexer indexer;
NestingLockWork(LuceneIndexer2 indexer, IndexInfo.LockWork<R> lockWork)
NestingLockWork(LuceneIndexer indexer, IndexInfo.LockWork<R> lockWork)
{
this.indexer = indexer;
this.lockWork = lockWork;
@@ -1137,9 +1206,9 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
WithAllWriteLocksWork<R> lockWork;
LuceneIndexer2 indexer;
LuceneIndexer indexer;
CoreLockWork(LuceneIndexer2 indexer, WithAllWriteLocksWork<R> lockWork)
CoreLockWork(LuceneIndexer indexer, WithAllWriteLocksWork<R> lockWork)
{
this.indexer = indexer;
this.lockWork = lockWork;
@@ -1173,28 +1242,4 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
}
}
public MLAnalysisMode getDefaultMLIndexAnalysisMode()
{
return defaultMLIndexAnalysisMode;
}
public void setDefaultMLIndexAnalysisMode(MLAnalysisMode mode)
{
//defaultMLIndexAnalysisMode = MLAnalysisMode.getMLAnalysisMode(mode);
defaultMLIndexAnalysisMode = mode;
}
public MLAnalysisMode getDefaultMLSearchAnalysisMode()
{
return defaultMLSearchAnalysisMode;
}
public void setDefaultMLSearchAnalysisMode(MLAnalysisMode mode)
{
//defaultMLSearchAnalysisMode = MLAnalysisMode.getMLAnalysisMode(mode);
defaultMLSearchAnalysisMode = mode;
}
}

View File

@@ -27,6 +27,7 @@ package org.alfresco.repo.search.impl.lucene;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
@@ -38,7 +39,6 @@ import javax.transaction.xa.XAResource;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.apache.log4j.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
@@ -50,25 +50,52 @@ import org.apache.lucene.index.TermDocs;
* Common support for indexing across implementations
*
* @author andyh
* @param <T> -
* the type used to generate the key in the index file
*/
public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
public abstract class AbstractLuceneIndexerImpl<T> extends AbstractLuceneBase
{
/**
* Enum for indexing actions against a node
*/
protected enum Action
{
INDEX, REINDEX, DELETE, CASCADEREINDEX
/**
* An index
*/
INDEX,
/**
* A reindex
*/
REINDEX,
/**
* A delete
*/
DELETE,
/**
* A cascaded reindex (ensures the directory structure is correct)
*/
CASCADEREINDEX
}
protected enum IndexUpdateStatus
{
UNMODIFIED, SYNCRONOUS, ASYNCHRONOUS;
/**
* Index is unchanged
*/
UNMODIFIED,
/**
* Index is being changed in the transaction
*/
SYNCRONOUS,
/**
* Index is being changed by a background update
*/
ASYNCHRONOUS;
}
protected long docs;
// Failure codes to index when problems occur indexing content
protected static class Command<S>
@@ -131,7 +158,8 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
/**
* Logger
*/
private static Logger s_logger = Logger.getLogger(AbstractLuceneIndexerImpl2.class);
@SuppressWarnings("unused")
private static Logger s_logger = Logger.getLogger(AbstractLuceneIndexerImpl.class);
protected static Set<String> deletePrimary(Collection<String> nodeRefs, IndexReader reader, boolean delete)
throws LuceneIndexException
@@ -149,8 +177,8 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
{
int doc = td.doc();
Document document = reader.document(doc);
String id = document.get("ID");
refs.add(id);
String[] ids = document.getValues("ID");
refs.add(ids[ids.length - 1]);
if (delete)
{
reader.deleteDocument(doc);
@@ -183,8 +211,8 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
{
int doc = td.doc();
Document document = reader.document(doc);
String id = document.get("ID");
refs.add(id);
String[] ids = document.getValues("ID");
refs.add(ids[ids.length - 1]);
if (delete)
{
reader.deleteDocument(doc);
@@ -220,8 +248,8 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
{
int doc = td.doc();
Document document = reader.document(doc);
String id = document.get("ID");
refs.add(id);
String[] ids = document.getValues("ID");
refs.add(ids[ids.length - 1]);
if (delete)
{
reader.deleteDocument(doc);
@@ -351,6 +379,7 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
/**
* Commit this index
* @throws LuceneIndexException
*/
public void commit() throws LuceneIndexException
{
@@ -411,7 +440,8 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
* Prepare to commit. At the moment this makes sure we have all the locks. TODO: This is not doing proper
* serialisation against the index as a database transaction would.
*
* @return
* @return the tx state
* @throws LuceneIndexException
*/
public int prepare() throws LuceneIndexException
{
@@ -459,7 +489,7 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
/**
* Has this index been modified?
*
* @return
* @return true if modified
*/
public boolean isModified()
{
@@ -468,6 +498,7 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
/**
* Roll back the index changes (this just means they are never added)
* @throws LuceneIndexException
*/
public void rollback() throws LuceneIndexException
{
@@ -530,7 +561,7 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
protected abstract void doRollBack() throws IOException;
protected abstract void doSetRollbackOnly() throws IOException;
protected abstract List<Document> createDocuments(String stringNodeRef, boolean isNew, boolean indexAllProperties,
boolean includeDirectoryDocuments);
@@ -631,7 +662,7 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
addCommand(new Command<T>(ref, Action.DELETE));
}
private void addCommand(Command command)
private void addCommand(Command<T> command)
{
if (commandList.size() > 0)
{
@@ -693,6 +724,9 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
}
}
/**
* @throws LuceneIndexException
*/
public void flushPending() throws LuceneIndexException
{
IndexReader mainReader = null;
@@ -783,4 +817,64 @@ public abstract class AbstractLuceneIndexerImpl2<T> extends LuceneBase2
}
}
/**
* Are we deleting leaves only (not metadata)?
* @return - true if only leaf nodes are being deleted
*/
public boolean getDeleteOnlyNodes()
{
return indexUpdateStatus == IndexUpdateStatus.ASYNCHRONOUS;
}
/**
* Get the deletions
* @return - the ids to delete
*/
public Set<String> getDeletions()
{
return Collections.unmodifiableSet(deletions);
}
/**
* Delete all entries from the index.
*
*/
public void deleteAll()
{
IndexReader mainReader = null;
try
{
mainReader = getReader();
for (int doc = 0; doc < mainReader.maxDoc(); doc++)
{
if (!mainReader.isDeleted(doc))
{
Document document = mainReader.document(doc);
String[] ids = document.getValues("ID");
deletions.add(ids[ids.length - 1]);
}
}
}
catch (IOException e)
{
// If anything goes wrong we try and do a roll back
throw new LuceneIndexException("Failed to delete all entries from the index", e);
}
finally
{
if (mainReader != null)
{
try
{
mainReader.close();
}
catch (IOException e)
{
throw new LuceneIndexException("Failed to close main reader", e);
}
}
}
}
}
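Reviewer note: several of the changes above (deletePrimary, deleteReference, deleteAll) switch from document.get("ID") to document.getValues("ID") and keep only the last value, i.e. the last identifier written to the document. A small standalone illustration of that Lucene behaviour follows; the field values are hypothetical and the API shown is the Lucene 2.x Field constructor.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

public class LastStoredIdSketch
{
    /** getValues returns every stored value for the field in the order it was added. */
    public static String lastId(Document document)
    {
        String[] ids = document.getValues("ID");
        return ids[ids.length - 1];
    }

    public static void main(String[] args)
    {
        Document document = new Document();
        document.add(new Field("ID", "first-id", Field.Store.YES, Field.Index.UN_TOKENIZED));
        document.add(new Field("ID", "latest-id", Field.Store.YES, Field.Index.UN_TOKENIZED));
        System.out.println(lastId(document)); // prints "latest-id"
    }
}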

View File

@@ -1,218 +0,0 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.search.impl.lucene;
import java.io.IOException;
import java.util.BitSet;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermPositions;
public class FilterIndexReaderByNodeRefs extends FilterIndexReader
{
BitSet deletedDocuments;
public FilterIndexReaderByNodeRefs(IndexReader reader, Set<NodeRef> deletions)
{
super(reader);
deletedDocuments = new BitSet(reader.maxDoc());
try
{
for (NodeRef nodeRef : deletions)
{
TermDocs td = reader.termDocs(new Term("ID", nodeRef.toString()));
while (td.next())
{
deletedDocuments.set(td.doc(), true);
}
}
}
catch (IOException e)
{
throw new AlfrescoRuntimeException("Failed to construct filtering index reader", e);
}
}
public static class FilterTermDocs implements TermDocs
{
BitSet deletedDocuments;
protected TermDocs in;
public FilterTermDocs(TermDocs in, BitSet deletedDocuments)
{
this.in = in;
this.deletedDocuments = deletedDocuments;
}
public void seek(Term term) throws IOException
{
// Seek is left to the base implementation
in.seek(term);
}
public void seek(TermEnum termEnum) throws IOException
{
// Seek is left to the base implementation
in.seek(termEnum);
}
public int doc()
{
// The current document info is valid in the base implementation
return in.doc();
}
public int freq()
{
// The frequency is valid in the base implementation
return in.freq();
}
public boolean next() throws IOException
{
while(in.next())
{
if(!deletedDocuments.get(in.doc()))
{
// Not masked
return true;
}
}
return false;
}
public int read(int[] docs, int[] freqs) throws IOException
{
int[] innerDocs = new int[docs.length];
int[] innerFreq = new int[docs.length];
int count = in.read(innerDocs, innerFreq);
// Is the stream exhausted
if (count == 0)
{
return 0;
}
if(allDeleted(innerDocs, count))
{
// Did not find anything - try again
return read(docs, freqs);
}
// Add non deleted
int insertPosition = 0;
for(int i = 0; i < count; i++)
{
if(!deletedDocuments.get(innerDocs[i]))
{
docs[insertPosition] = innerDocs[i];
freqs[insertPosition] = innerFreq[i];
insertPosition++;
}
}
return insertPosition;
}
private boolean allDeleted(int[] docs, int fillSize)
{
for(int i = 0; i < fillSize; i++)
{
if(!deletedDocuments.get(docs[i]))
{
return false;
}
}
return true;
}
public boolean skipTo(int i) throws IOException
{
boolean result = in.skipTo(i);
if(result == false)
{
return false;
}
if(deletedDocuments.get(in.doc()))
{
return skipTo(i);
}
else
{
return true;
}
}
public void close() throws IOException
{
// Leave to internal implementation
in.close();
}
}
/** Base class for filtering {@link TermPositions} implementations. */
public static class FilterTermPositions extends FilterTermDocs implements TermPositions
{
public FilterTermPositions(TermPositions in, BitSet deletedDocuements)
{
super(in, deletedDocuements);
}
public int nextPosition() throws IOException
{
return ((TermPositions) this.in).nextPosition();
}
}
@Override
public int numDocs()
{
return super.numDocs() - deletedDocuments.cardinality();
}
@Override
public TermDocs termDocs() throws IOException
{
return new FilterTermDocs(super.termDocs(), deletedDocuments);
}
@Override
public TermPositions termPositions() throws IOException
{
return new FilterTermPositions(super.termPositions(), deletedDocuments);
}
}

View File

@@ -43,15 +43,30 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;
public class FilterIndexReaderByNodeRefs2 extends FilterIndexReader
/**
* An index reader that filters documents from another.
*
* @author andyh
*
*/
public class FilterIndexReaderByStringId extends FilterIndexReader
{
private static Log s_logger = LogFactory.getLog(FilterIndexReaderByNodeRefs2.class);
private static Log s_logger = LogFactory.getLog(FilterIndexReaderByStringId.class);
BitSet deletedDocuments;
private String id;
public FilterIndexReaderByNodeRefs2(String id, IndexReader reader, Set<String> deletions, boolean deleteNodesOnly)
/**
* Apply the filter
*
* @param id
* @param reader
* @param deletions
* @param deleteNodesOnly
*/
public FilterIndexReaderByStringId(String id, IndexReader reader, Set<String> deletions, boolean deleteNodesOnly)
{
super(reader);
this.id = id;
@@ -108,6 +123,12 @@ public class FilterIndexReaderByNodeRefs2 extends FilterIndexReader
}
}
/**
* Filter implementation
*
* @author andyh
*
*/
public static class FilterTermDocs implements TermDocs
{
BitSet deletedDocuments;
@@ -116,6 +137,11 @@ public class FilterIndexReaderByNodeRefs2 extends FilterIndexReader
String id;
/**
* @param id
* @param in
* @param deletedDocuments
*/
public FilterTermDocs(String id, TermDocs in, BitSet deletedDocuments)
{
this.in = in;
@@ -242,6 +268,11 @@ public class FilterIndexReaderByNodeRefs2 extends FilterIndexReader
public static class FilterTermPositions extends FilterTermDocs implements TermPositions
{
/**
* @param id
* @param in
* @param deletedDocuements
*/
public FilterTermPositions(String id, TermPositions in, BitSet deletedDocuements)
{
super(id, in, deletedDocuements);

View File

@@ -35,6 +35,7 @@ import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.search.IndexerAndSearcher;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
@@ -49,6 +50,12 @@ import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ISO9075;
/**
* Category service implementation
*
* @author andyh
*
*/
public class LuceneCategoryServiceImpl implements CategoryService
{
private NodeService nodeService;
@@ -57,8 +64,11 @@ public class LuceneCategoryServiceImpl implements CategoryService
private DictionaryService dictionaryService;
private LuceneIndexerAndSearcher indexerAndSearcher;
private IndexerAndSearcher indexerAndSearcher;
/**
*
*/
public LuceneCategoryServiceImpl()
{
super();
@@ -66,22 +76,38 @@ public class LuceneCategoryServiceImpl implements CategoryService
// Inversion of control support
/**
* Set the node service
* @param nodeService
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Set the service to map prefixes to uris
* @param namespacePrefixResolver
*/
public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver)
{
this.namespacePrefixResolver = namespacePrefixResolver;
}
/**
* Set the dictionary service
* @param dictionaryService
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
public void setIndexerAndSearcher(LuceneIndexerAndSearcher indexerAndSearcher)
/**
* Set the indexer and searcher
* @param indexerAndSearcher
*/
public void setIndexerAndSearcher(IndexerAndSearcher indexerAndSearcher)
{
this.indexerAndSearcher = indexerAndSearcher;
}

View File

@@ -25,10 +25,11 @@
package org.alfresco.repo.search.impl.lucene;
import java.io.File;
import java.util.Collections;
import junit.framework.TestCase;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2.LuceneIndexBackupComponent;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.transaction.TransactionService;
@@ -37,7 +38,6 @@ import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
/**
* @see org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent
*
* @author Derek Hulley
*/
@@ -55,7 +55,7 @@ public class LuceneIndexBackupComponentTest extends TestCase
{
TransactionService transactionService = (TransactionService) ctx.getBean("transactionComponent");
NodeService nodeService = (NodeService) ctx.getBean("NodeService");
LuceneIndexerAndSearcher factory = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
LuceneIndexerAndSearcher factory = (LuceneIndexerAndSearcher) ctx.getBean("admLuceneIndexerAndSearcherFactory");
this.authenticationComponent = (AuthenticationComponent)ctx.getBean("authenticationComponent");
this.authenticationComponent.setSystemUserAsCurrentUser();
@@ -65,7 +65,7 @@ public class LuceneIndexBackupComponentTest extends TestCase
backupComponent = new LuceneIndexBackupComponent();
backupComponent.setTransactionService(transactionService);
backupComponent.setFactory(factory);
backupComponent.setFactories(Collections.singleton(factory));
backupComponent.setNodeService(nodeService);
backupComponent.setTargetLocation(tempTargetDir.toString());
}
@@ -77,6 +77,9 @@ public class LuceneIndexBackupComponentTest extends TestCase
super.tearDown();
}
/**
* Test back up
*/
public void testBackup()
{
backupComponent.backup();

View File

@@ -26,7 +26,9 @@ package org.alfresco.repo.search.impl.lucene;
import java.util.Set;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.BackgroundIndexerAware;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.TransactionSynchronisationAwareIndexer;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -36,21 +38,11 @@ import org.alfresco.service.cmr.repository.NodeService;
/**
* @author Andy Hind
*/
public interface LuceneIndexer2 extends IndexerSPI
{
public void commit();
public void rollback();
public int prepare();
public boolean isModified();
public void setNodeService(NodeService nodeService);
public void setDictionaryService(DictionaryService dictionaryService);
public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer);
public interface LuceneIndexer extends Indexer, TransactionSynchronisationAwareIndexer
{
public String getDeltaId();
public void flushPending() throws LuceneIndexException;
public Set<String> getDeletions();
public boolean getDeleteOnlyNodes();
public boolean getDeleteOnlyNodes();
public <R> R doWithWriteLock(IndexInfo.LockWork <R> lockWork);
}

View File

@@ -28,9 +28,26 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespacePrefixResolver;
public interface LuceneSearcher2 extends SearchService
/**
* Lucene implementation-specific extension to the searcher API
* @author andyh
*
*/
public interface LuceneSearcher extends SearchService
{
/**
* Check if the index exists
* @return - true if it exists
*/
public boolean indexExists();
/**
* Set the node service
* @param nodeService
*/
public void setNodeService(NodeService nodeService);
/**
* Set the namespace service
* @param namespacePrefixResolver
*/
public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver);
}

View File

@@ -39,7 +39,12 @@ import org.saxpath.Axis;
import org.saxpath.Operator;
import org.saxpath.SAXPathException;
import org.saxpath.XPathHandler;
/**
* XPATH handler for lucene - currently only does basic paths
*
* @author andyh
*
*/
public class LuceneXPathHandler implements XPathHandler
{
private PathQuery query;
@@ -52,11 +57,18 @@ public class LuceneXPathHandler implements XPathHandler
private DictionaryService dictionaryService;
/**
*
*/
public LuceneXPathHandler()
{
super();
}
/**
* Get the resulting query
* @return - the query
*/
public PathQuery getQuery()
{
return this.query;
@@ -348,6 +360,7 @@ public class LuceneXPathHandler implements XPathHandler
}
@SuppressWarnings("unused")
private void addAbsolute(String nameSpace, String localName)
{
ArrayList<StructuredFieldPosition> answer = new ArrayList<StructuredFieldPosition>(2);
@@ -401,6 +414,10 @@ public class LuceneXPathHandler implements XPathHandler
{
answer.add(new RelativeStructuredFieldPosition(PathTokenFilter.NO_NS_TOKEN_TEXT));
}
else if (namespacePrefixResolver.getNamespaceURI("").equals(""))
{
answer.add(new RelativeStructuredFieldPosition(PathTokenFilter.NO_NS_TOKEN_TEXT));
}
else
{
answer.add(new RelativeStructuredFieldPosition(namespacePrefixResolver.getNamespaceURI("")));
@@ -480,11 +497,19 @@ public class LuceneXPathHandler implements XPathHandler
}
/**
* Set the service to resolve namespace prefixes
* @param namespacePrefixResolver
*/
public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver)
{
this.namespacePrefixResolver = namespacePrefixResolver;
}
/**
* Set the dictionary service.
* @param dictionaryService
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;

View File

@@ -90,7 +90,7 @@ public class NumericEncoder
* ffffffff.
*
* @param intToEncode
* @return
* @return the encoded string
*/
public static String encode(int intToEncode)
{
@@ -104,7 +104,7 @@ public class NumericEncoder
* ffffffffffffffff.
*
* @param longToEncode
* @return
* @return - the encoded string
*/
public static String encode(long longToEncode)
{
@@ -112,13 +112,23 @@ public class NumericEncoder
return encodeToHex(replacement);
}
/**
* Decode a long
* @param hex
* @return - the decoded long value
*/
public static long decodeLong(String hex)
{
return decodeFromHex(hex) ^ LONG_SIGN_MASK;
}
/**
* Encode a float into a string that orders correctly according to string
* comparison. Note that there is no negative NaN but there are codings that
* imply this. So NaN and -Infinity may not compare as expected.
*
* @param floatToEncode
* @return
* @return - the encoded string
*/
public static String encode(float floatToEncode)
{
@@ -142,7 +152,7 @@ public class NumericEncoder
* imply this. So NaN and -Infinity may not compare as expected.
*
* @param doubleToEncode
* @return
* @return the encoded string
*/
public static String encode(double doubleToEncode)
{
@@ -185,6 +195,18 @@ public class NumericEncoder
while (l != 0);
return new String(buf);
}
private static long decodeFromHex(String hex)
{
long l = 0;
long factor = 1;
for(int i = 15; i >= 0; i--, factor <<= 4)
{
int digit = Character.digit(hex.charAt(i), 16);
l += digit*factor;
}
return l;
}
private static final char[] DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e',
'f' };
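Reviewer note: the new decodeLong above reverses encode(long), which flips the long's sign bit and emits the result as fixed-width hex so that plain string comparison matches numeric order. A minimal self-contained sketch of that idea follows; it is not the NumericEncoder class itself.

public class OrderPreservingLongCodecSketch
{
    private static final long SIGN_MASK = 0x8000000000000000L;

    /** Flip the sign bit and emit 16 hex digits; lexicographic order then matches numeric order. */
    public static String encode(long value)
    {
        return String.format("%016x", value ^ SIGN_MASK);
    }

    /** Reverse the encoding: parse the hex digits and flip the sign bit back. */
    public static long decode(String hex)
    {
        long bits = 0;
        for (int i = 0; i < 16; i++)
        {
            bits = (bits << 4) | Character.digit(hex.charAt(i), 16);
        }
        return bits ^ SIGN_MASK;
    }

    public static void main(String[] args)
    {
        long[] samples = { Long.MIN_VALUE, -1L, 0L, 1L, Long.MAX_VALUE };
        String previous = null;
        for (long sample : samples)
        {
            String encoded = encode(sample);
            assert decode(encoded) == sample;
            assert previous == null || previous.compareTo(encoded) < 0;
            previous = encoded;
        }
    }
}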

View File

@@ -26,14 +26,27 @@ package org.alfresco.repo.search.impl.lucene.analysis;
import junit.framework.TestCase;
/**
* Tests for string encoding
* @author andyh
*
*/
public class NumericEncodingTest extends TestCase
{
/**
*
*
*/
public NumericEncodingTest()
{
super();
}
/**
*
* @param arg0
*/
public NumericEncodingTest(String arg0)
{
super(arg0);
@@ -126,7 +139,7 @@ public class NumericEncodingTest extends TestCase
}
}
/*
/**
* Sample test for int
*/
@@ -141,7 +154,7 @@ public class NumericEncodingTest extends TestCase
assertEquals("ffffffff", NumericEncoder.encode(Integer.MAX_VALUE));
}
/*
/**
* Sample test for long
*/
@@ -154,9 +167,17 @@ public class NumericEncodingTest extends TestCase
assertEquals("8000000000000001", NumericEncoder.encode(1L));
assertEquals("fffffffffffffffe", NumericEncoder.encode(Long.MAX_VALUE - 1));
assertEquals("ffffffffffffffff", NumericEncoder.encode(Long.MAX_VALUE));
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(Long.MIN_VALUE)), Long.MIN_VALUE);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(Long.MIN_VALUE + 1)),Long.MIN_VALUE + 1);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(-1L)), -1L);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(0L)), 0L);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(1L)), 1L);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(Long.MAX_VALUE - 1)),Long.MAX_VALUE - 1);
assertEquals(NumericEncoder.decodeLong(NumericEncoder.encode(Long.MAX_VALUE)), Long.MAX_VALUE);
}
/*
/**
* Sample test for float
*/
@@ -174,7 +195,7 @@ public class NumericEncodingTest extends TestCase
}
/*
/**
* Sample test for double
*/

View File

@@ -35,9 +35,8 @@ import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
/**
* @author andyh
*
* TODO To change the template for this generated type comment go to Window - Preferences - Java - Code Style - Code Templates
* @author andyh TODO To change the template for this generated type comment go to Window - Preferences - Java - Code
* Style - Code Templates
*/
public class PathTokenFilter extends Tokenizer
{
@@ -60,7 +59,7 @@ public class PathTokenFilter extends Tokenizer
public final static String TOKEN_TYPE_PATH_ELEMENT_NAME = "PATH_ELEMENT_NAME";
public final static String TOKEN_TYPE_PATH_ELEMENT_NAMESPACE = "PATH_ELEMENT_NAMESPACE";
public final static String TOKEN_TYPE_PATH_ELEMENT_NAMESPACE_PREFIX = "PATH_ELEMENT_NAMESPACE_PREFIX";
char pathSeparator;
@@ -143,10 +142,9 @@ public class PathTokenFilter extends Tokenizer
if (text.length() == 0)
{
continue; // Skip if we find // or /; or ;; etc
continue; // Skip if we find // or /; or ;; etc
}
if (text.charAt(text.length() - 1) == pathSeparator)
{
text = text.substring(0, text.length() - 1);
@@ -182,8 +180,8 @@ public class PathTokenFilter extends Tokenizer
{
namespaceToken = new Token(text.substring(0, split), t.startOffset(), t.startOffset() + split,
TOKEN_TYPE_PATH_ELEMENT_NAMESPACE_PREFIX);
nameToken = new Token(text.substring(split + 1), t.startOffset()
+ split + 1, t.endOffset(), TOKEN_TYPE_PATH_ELEMENT_NAME);
nameToken = new Token(text.substring(split + 1), t.startOffset() + split + 1, t.endOffset(),
TOKEN_TYPE_PATH_ELEMENT_NAME);
}
else
{
@@ -200,7 +198,15 @@ public class PathTokenFilter extends Tokenizer
if (includeNamespace)
{
if (namespaceToken.termText().equals(""))
{
namespaceToken = new Token(noNsTokenText, t.startOffset(), t.startOffset(),
TOKEN_TYPE_PATH_ELEMENT_NAMESPACE);
namespaceToken.setPositionIncrement(1);
}
tokens.add(namespaceToken);
}
tokens.add(nameToken);
@@ -271,9 +277,9 @@ public class PathTokenFilter extends Tokenizer
else if (!inNameSpace && (c == ';'))
{
buffer.append(c);
return new Token(buffer.toString(), start, readerPosition , "LASTQNAME");
return new Token(buffer.toString(), start, readerPosition, "LASTQNAME");
}
buffer.append(c);
}
readerPosition = -1;

View File

@@ -26,8 +26,19 @@ package org.alfresco.repo.search.impl.lucene.fts;
import org.alfresco.service.cmr.repository.StoreRef;
/**
* Supports call back to the FTS indexer to report what has been done
* @author andyh
*
*/
public interface FTSIndexerAware
{
public void indexCompleted(StoreRef storeRef, int remaining, Exception e);
/**
* Call back used by the background indexer
*
* @param storeRef
* @param remaining
* @param e
*/
public void indexCompleted(StoreRef storeRef, int remaining, Exception e);
}

View File

@@ -24,6 +24,12 @@
*/
package org.alfresco.repo.search.impl.lucene.fts;
/**
* FTS indexer exception
*
* @author andyh
*
*/
public class FTSIndexerException extends RuntimeException
{
@@ -32,21 +38,34 @@ public class FTSIndexerException extends RuntimeException
*/
private static final long serialVersionUID = 3258134635127912754L;
/**
*
*/
public FTSIndexerException()
{
super();
}
/**
* @param message
*/
public FTSIndexerException(String message)
{
super(message);
}
/**
* @param message
* @param cause
*/
public FTSIndexerException(String message, Throwable cause)
{
super(message, cause);
}
/**
* @param cause
*/
public FTSIndexerException(Throwable cause)
{
super(cause);

View File

@@ -28,8 +28,16 @@ import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
/**
* Job to run the FTS indexer
* @author andyh
*
*/
public class FTSIndexerJob implements Job
{
/**
*
*/
public FTSIndexerJob()
{
super();

View File

@@ -25,19 +25,48 @@
package org.alfresco.repo.search.impl.lucene.fts;
import org.alfresco.service.cmr.repository.StoreRef;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
public interface FullTextSearchIndexer {
/**
* API for full text search indexing in the background
*
* @author andyh
*/
public interface FullTextSearchIndexer extends BeanFactoryPostProcessor
{
/**
* Mark a store as dirty, requiring a background index update to fix it up.
*
* @param storeRef
*/
public abstract void requiresIndex(StoreRef storeRef);
/**
* Call back to report state back to the indexer
*
* @param storeRef
* @param remaining
* @param e
*/
public abstract void indexCompleted(StoreRef storeRef, int remaining, Exception e);
/**
* Pause indexing (no background indexing until resume is called)
* @throws InterruptedException
*/
public abstract void pause() throws InterruptedException;
/**
* Resume after a pause
*
* @throws InterruptedException
*/
public abstract void resume() throws InterruptedException;
/**
* Do a chunk of outstanding indexing work
*
*/
public abstract void index();
}

View File

@@ -28,29 +28,36 @@ import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher;
import org.alfresco.repo.search.BackgroundIndexerAware;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerAndSearcher;
import org.alfresco.repo.search.SupportsBackgroundIndexing;
import org.alfresco.service.cmr.repository.StoreRef;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Background index update scheduler
* @author andyh
*
*/
public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearchIndexer
{
private enum State
{
ACTIVE, PAUSING, PAUSED
};
private static Set<StoreRef> requiresIndex = new LinkedHashSet<StoreRef>();
private static Set<StoreRef> indexing = new HashSet<StoreRef>();
LuceneIndexerAndSearcher luceneIndexerAndSearcherFactory;
private IndexerAndSearcher indexerAndSearcherFactory;
private int pauseCount = 0;
private boolean paused = false;
/**
*
*/
public FullTextSearchIndexerImpl()
{
super();
@@ -139,6 +146,7 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
}
}
@SuppressWarnings("unused")
private synchronized boolean isPaused() throws InterruptedException
{
if (pauseCount == 0)
@@ -172,9 +180,13 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
if (toIndex != null)
{
// System.out.println("Indexing "+toIndex+" at "+(new java.util.Date()));
IndexerSPI indexer = luceneIndexerAndSearcherFactory.getIndexer(toIndex);
indexer.registerCallBack(this);
done += indexer.updateFullTextSearch(1000);
Indexer indexer = indexerAndSearcherFactory.getIndexer(toIndex);
if(indexer instanceof BackgroundIndexerAware)
{
BackgroundIndexerAware backgroundIndexerAware = (BackgroundIndexerAware)indexer;
backgroundIndexerAware.registerCallBack(this);
done += backgroundIndexerAware.updateFullTextSearch(1000);
}
}
else
{
@@ -213,13 +225,34 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
return nextStoreRef;
}
public void setLuceneIndexerAndSearcherFactory(LuceneIndexerAndSearcher luceneIndexerAndSearcherFactory)
/**
* @param indexerAndSearcherFactory
*/
public void setIndexerAndSearcherFactory(IndexerAndSearcher indexerAndSearcherFactory)
{
this.luceneIndexerAndSearcherFactory = luceneIndexerAndSearcherFactory;
this.indexerAndSearcherFactory = indexerAndSearcherFactory;
}
/**
* @param args
* @throws InterruptedException
*/
public static void main(String[] args) throws InterruptedException
{
@SuppressWarnings("unused")
ApplicationContext ctx = new ClassPathXmlApplicationContext("classpath:alfresco/application-context.xml");
}
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException
{
// Find beans implementing SupportsBackgroundIndexing and register this indexer with them
for(Object bgindexable : beanFactory.getBeansOfType(SupportsBackgroundIndexing.class).values())
{
if(bgindexable instanceof SupportsBackgroundIndexing)
{
((SupportsBackgroundIndexing)bgindexable).setFullTextSearchIndexer(this);
}
}
}
}
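Reviewer note: the postProcessBeanFactory implementation above is a common Spring wiring trick - because FullTextSearchIndexer now extends BeanFactoryPostProcessor, it can find every bean implementing SupportsBackgroundIndexing and hand itself to them, rather than each index factory declaring the dependency in XML. A generic sketch of the pattern with illustrative interface and class names:

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;

interface NeedsCoordinator
{
    void setCoordinator(Object coordinator);
}

public class CoordinatorRegistrar implements BeanFactoryPostProcessor
{
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException
    {
        // getBeansOfType returns every bean in the context that implements the marker
        // interface, so all consumers are wired up here in one place.
        for (Object bean : beanFactory.getBeansOfType(NeedsCoordinator.class).values())
        {
            ((NeedsCoordinator) bean).setCoordinator(this);
        }
    }
}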

View File

@@ -55,7 +55,7 @@ import java.util.zip.CRC32;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.FilterIndexReaderByNodeRefs2;
import org.alfresco.repo.search.impl.lucene.FilterIndexReaderByStringId;
import org.alfresco.repo.search.impl.lucene.analysis.AlfrescoStandardAnalyser;
import org.alfresco.util.GUID;
import org.apache.commons.logging.Log;
@@ -1068,7 +1068,7 @@ public class IndexInfo
else
{
reader = new MultiReader(new IndexReader[] {
new FilterIndexReaderByNodeRefs2("main+id", mainIndexReader, deletions, deleteOnlyNodes),
new FilterIndexReaderByStringId("main+id", mainIndexReader, deletions, deleteOnlyNodes),
deltaReader });
}
reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader("MainReader" + id, reader);
@@ -1690,7 +1690,7 @@ public class IndexInfo
try
{
reader = new MultiReader(new IndexReader[] {
new FilterIndexReaderByNodeRefs2(id, reader, getDeletions(entry.getName()), entry
new FilterIndexReaderByStringId(id, reader, getDeletions(entry.getName()), entry
.isDeletOnlyNodes()), subReader });
}
catch (IOException ioe)

View File

@@ -50,6 +50,11 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.Weight;
/**
* Leaf scorer to complete path queries
* @author andyh
*
*/
public class LeafScorer extends Scorer
{
static class Counter
@@ -112,6 +117,23 @@ public class LeafScorer extends Scorer
private TermPositions tp;
/**
* Constructor - should use an arg object ...
*
* @param weight
* @param root
* @param level0
* @param containerScorer
* @param sfps
* @param allNodes
* @param selfIds
* @param reader
* @param similarity
* @param norms
* @param dictionaryService
* @param repeat
* @param tp
*/
public LeafScorer(Weight weight, TermPositions root, TermPositions level0, ContainerScorer containerScorer,
StructuredFieldPosition[] sfps, TermPositions allNodes, HashMap<String, Counter> selfIds,
IndexReader reader, Similarity similarity, byte[] norms, DictionaryService dictionaryService,
@@ -157,7 +179,8 @@ public class LeafScorer extends Scorer
{
int doc = containerScorer.doc();
Document document = reader.document(doc);
Field id = document.getField("ID");
Field[] fields = document.getFields("ID");
Field id = fields[fields.length-1];
Counter counter = parentIds.get(id.stringValue());
if (counter == null)
{
@@ -203,7 +226,8 @@ public class LeafScorer extends Scorer
{
int doc = level0.doc();
Document document = reader.document(doc);
Field id = document.getField("ID");
Field[] fields = document.getFields("ID");
Field id = fields[fields.length-1];
if (id != null)
{
Counter counter = parentIds.get(id.stringValue());
@@ -701,7 +725,8 @@ public class LeafScorer extends Scorer
{
return;
}
String id = document.getField("ID").stringValue();
Field[] fields = document.getFields("ID");
String id = fields[fields.length-1].stringValue();
StructuredFieldPosition last = sfps[sfps.length - 1];
if ((last.linkSelf() && selfIds.containsKey(id)))
{