Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-24 17:32:48 +00:00)
Merged up to HEAD.
git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/WCM-DEV2/root@3129 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -0,0 +1,239 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *     http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import java.io.IOException;
import java.util.BitSet;
import java.util.Set;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermPositions;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;

public class FilterIndexReaderByNodeRefs2 extends FilterIndexReader
{
    BitSet deletedDocuments;

    public FilterIndexReaderByNodeRefs2(IndexReader reader, Set<NodeRef> deletions, boolean deleteNodesOnly)
    {
        super(reader);
        deletedDocuments = new BitSet(reader.maxDoc());

        try
        {
            if (!deleteNodesOnly)
            {
                // Mask every document whose ID matches a deleted node reference
                for (NodeRef nodeRef : deletions)
                {
                    TermDocs td = reader.termDocs(new Term("ID", nodeRef.toString()));
                    while (td.next())
                    {
                        deletedDocuments.set(td.doc(), true);
                    }
                }
            }
            else
            {
                Searcher searcher = new IndexSearcher(reader);
                for (NodeRef nodeRef : deletions)
                {
                    // The ID clause is required; the ISNODE:T clause is optional
                    BooleanQuery query = new BooleanQuery();
                    query.add(new TermQuery(new Term("ID", nodeRef.toString())), true, false);
                    query.add(new TermQuery(new Term("ISNODE", "T")), false, false);
                    Hits hits = searcher.search(query);
                    if (hits.length() > 0)
                    {
                        for (int i = 0; i < hits.length(); i++)
                        {
                            deletedDocuments.set(hits.id(i), true);
                        }
                    }
                }
            }
        }
        catch (IOException e)
        {
            throw new AlfrescoRuntimeException("Failed to construct filtering index reader", e);
        }
    }

    public static class FilterTermDocs implements TermDocs
    {
        BitSet deletedDocuments;

        protected TermDocs in;

        public FilterTermDocs(TermDocs in, BitSet deletedDocuments)
        {
            this.in = in;
            this.deletedDocuments = deletedDocuments;
        }

        public void seek(Term term) throws IOException
        {
            // Seek is left to the base implementation
            in.seek(term);
        }

        public void seek(TermEnum termEnum) throws IOException
        {
            // Seek is left to the base implementation
            in.seek(termEnum);
        }

        public int doc()
        {
            // The current document info is valid in the base implementation
            return in.doc();
        }

        public int freq()
        {
            // The frequency is valid in the base implementation
            return in.freq();
        }

        public boolean next() throws IOException
        {
            while (in.next())
            {
                if (!deletedDocuments.get(in.doc()))
                {
                    // Not masked
                    return true;
                }
            }
            return false;
        }

        public int read(int[] docs, int[] freqs) throws IOException
        {
            int[] innerDocs = new int[docs.length];
            int[] innerFreq = new int[docs.length];
            int count = in.read(innerDocs, innerFreq);

            // Is the stream exhausted
            if (count == 0)
            {
                return 0;
            }

            if (allDeleted(innerDocs, count))
            {
                // Did not find anything - try again
                return read(docs, freqs);
            }

            // Add non deleted
            int insertPosition = 0;
            for (int i = 0; i < count; i++)
            {
                if (!deletedDocuments.get(innerDocs[i]))
                {
                    docs[insertPosition] = innerDocs[i];
                    freqs[insertPosition] = innerFreq[i];
                    insertPosition++;
                }
            }

            return insertPosition;
        }

        private boolean allDeleted(int[] docs, int fillSize)
        {
            for (int i = 0; i < fillSize; i++)
            {
                if (!deletedDocuments.get(docs[i]))
                {
                    return false;
                }
            }
            return true;
        }

        public boolean skipTo(int i) throws IOException
        {
            boolean result = in.skipTo(i);
            if (result == false)
            {
                return false;
            }

            if (deletedDocuments.get(in.doc()))
            {
                // Landed on a masked document - skip again
                return skipTo(i);
            }
            else
            {
                return true;
            }
        }

        public void close() throws IOException
        {
            // Leave to internal implementation
            in.close();
        }
    }

    /** Base class for filtering {@link TermPositions} implementations. */
    public static class FilterTermPositions extends FilterTermDocs implements TermPositions
    {
        public FilterTermPositions(TermPositions in, BitSet deletedDocuments)
        {
            super(in, deletedDocuments);
        }

        public int nextPosition() throws IOException
        {
            return ((TermPositions) this.in).nextPosition();
        }
    }

    @Override
    public int numDocs()
    {
        return super.numDocs() - deletedDocuments.cardinality();
    }

    @Override
    public TermDocs termDocs() throws IOException
    {
        return new FilterTermDocs(super.termDocs(), deletedDocuments);
    }

    @Override
    public TermPositions termPositions() throws IOException
    {
        return new FilterTermPositions(super.termPositions(), deletedDocuments);
    }
}
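A minimal usage sketch for the filtering reader above (Lucene 1.4-era API); the index path and node reference are illustrative, not taken from the commit:

    // Open the main index and mask a deleted node without touching the files on disk.
    IndexReader mainReader = IndexReader.open("/var/indexes/workspace/SpacesStore");
    Set<NodeRef> deletions = Collections.singleton(
            new NodeRef("workspace://SpacesStore/8f2105b4-daaf-4874-9e8a-2152569d109b"));
    IndexReader filtered = new FilterIndexReaderByNodeRefs2(mainReader, deletions, false);

    // Searches through the wrapper no longer see the masked documents,
    // and numDocs() reflects the reduced count.
    Searcher searcher = new IndexSearcher(filtered);
    Hits hits = searcher.search(new TermQuery(new Term("ISNODE", "T")));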
@@ -0,0 +1,283 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *     http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import java.io.File;
import java.io.IOException;
import java.util.Set;

import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.apache.log4j.Logger;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searcher;

/**
 * Common support for abstracting the Lucene indexer from its configuration and management requirements.
 *
 * <p>
 * This class defines where the indexes are stored. This should be via a configurable Bean property in Spring.
 *
 * <p>
 * The default file structure is
 * <ol>
 * <li><b>"base"/"protocol"/"name"/</b> for the main index
 * <li><b>"base"/"protocol"/"name"/deltas/"id"</b> for transactional updates
 * <li><b>"base"/"protocol"/"name"/undo/"id"</b> for undo information
 * </ol>
 *
 * <p>
 * The IndexWriter and IndexReader for a given index are toggled (one should be used for delete and the other for write). These are reused/closed/initialised as required.
 *
 * <p>
 * The index deltas are buffered to memory and persisted in the file system as required.
 *
 * @author Andy Hind
 */
public abstract class LuceneBase2
{
    private static Logger s_logger = Logger.getLogger(LuceneBase2.class);

    private IndexInfo indexInfo;

    /**
     * The identifier for the store
     */
    protected StoreRef store;

    /**
     * The identifier for the delta
     */
    protected String deltaId;

    private LuceneConfig config;

    private DictionaryService dictionaryService;

    /**
     * Initialise the configuration elements of the lucene store indexers and searchers.
     *
     * @param store the store against which to index and search
     * @param deltaId the identifier of the transactional delta, or null if there is none
     * @throws LuceneIndexException
     */
    protected void initialise(StoreRef store, String deltaId, boolean createMain, boolean createDelta)
            throws LuceneIndexException
    {
        this.store = store;
        this.deltaId = deltaId;

        String basePath = getBasePath();
        File baseDir = new File(basePath);
        indexInfo = IndexInfo.getIndexInfo(baseDir);
        try
        {
            if (deltaId != null)
            {
                indexInfo.setStatus(deltaId, TransactionStatus.ACTIVE, null, null);
            }
        }
        catch (IOException e)
        {
            throw new IndexerException("Failed to set delta as active", e);
        }
    }

    /**
     * Utility method to find the path to the base index
     *
     * @return the base path for this store's indexes
     */
    private String getBasePath()
    {
        if (config.getIndexRootLocation() == null)
        {
            throw new IndexerException("No configuration for index location");
        }
        String basePath = config.getIndexRootLocation()
                + File.separator + store.getProtocol() + File.separator + store.getIdentifier() + File.separator;
        return basePath;
    }

    /**
     * Get a searcher for the main index.
     *
     * TODO: Split out support for the main index. We really only need this if we want to search over the changing index before it is committed.
     *
     * @return a searcher over the main index
     * @throws LuceneIndexException
     */
    protected IndexSearcher getSearcher() throws LuceneIndexException
    {
        try
        {
            return new ClosingIndexSearcher(indexInfo.getMainIndexReferenceCountingReadOnlyIndexReader());
        }
        catch (IOException e)
        {
            s_logger.error("Error", e);
            throw new LuceneIndexException("Failed to open IndexSearcher for " + getBasePath(), e);
        }
    }

    protected Searcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException
    {
        // If we know the delta id we should do better
        try
        {
            if (luceneIndexer == null)
            {
                return new ClosingIndexSearcher(indexInfo.getMainIndexReferenceCountingReadOnlyIndexReader());
            }
            else
            {
                // TODO: Create appropriate reader that lies about deletions
                // from the first
                luceneIndexer.flushPending();
                return new ClosingIndexSearcher(indexInfo.getMainIndexReferenceCountingReadOnlyIndexReader(deltaId,
                        luceneIndexer.getDeletions(), luceneIndexer.getDeleteOnlyNodes()));
            }
        }
        catch (IOException e)
        {
            s_logger.error("Error", e);
            throw new LuceneIndexException("Failed to open IndexSearcher for " + getBasePath(), e);
        }
    }

    /**
     * Get a reader for the on-file portion of the delta.
     *
     * @return the delta index reader
     * @throws IOException
     */
    protected IndexReader getDeltaReader() throws LuceneIndexException, IOException
    {
        return indexInfo.getDeltaIndexReader(deltaId);
    }

    /**
     * Close the on-file reader for the delta if it is open.
     *
     * @throws IOException
     */
    protected void closeDeltaReader() throws LuceneIndexException, IOException
    {
        indexInfo.closeDeltaIndexReader(deltaId);
    }

    /**
     * Get the on-file writer for the delta.
     *
     * @return the delta index writer
     * @throws IOException
     */
    protected IndexWriter getDeltaWriter() throws LuceneIndexException, IOException
    {
        return indexInfo.getDeltaIndexWriter(deltaId, new LuceneAnalyser(dictionaryService));
    }

    /**
     * Close the on-disk delta writer.
     *
     * @throws IOException
     */
    protected void closeDeltaWriter() throws LuceneIndexException, IOException
    {
        indexInfo.closeDeltaIndexWriter(deltaId);
    }

    /**
     * Save the in-memory delta to the disk, making sure there is nothing held in memory.
     *
     * @throws IOException
     */
    protected void saveDelta() throws LuceneIndexException, IOException
    {
        // Only one should exist so we do not need error trapping to execute the other
        closeDeltaReader();
        closeDeltaWriter();
    }

    protected void setInfo(long docs, Set<NodeRef> deletions, boolean deleteNodesOnly) throws IOException
    {
        indexInfo.setPreparedState(deltaId, deletions, docs, deleteNodesOnly);
    }

    protected void setStatus(TransactionStatus status) throws IOException
    {
        indexInfo.setStatus(deltaId, status, null, null);
    }

    protected IndexReader getReader() throws LuceneIndexException, IOException
    {
        return indexInfo.getMainIndexReferenceCountingReadOnlyIndexReader();
    }

    public void setDictionaryService(DictionaryService dictionaryService)
    {
        this.dictionaryService = dictionaryService;
    }

    public DictionaryService getDictionaryService()
    {
        return dictionaryService;
    }

    public void setLuceneConfig(LuceneConfig config)
    {
        this.config = config;
    }

    public LuceneConfig getLuceneConfig()
    {
        return config;
    }

    public String getDeltaId()
    {
        return deltaId;
    }
}
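To make the lifecycle concrete, here is a rough sketch of how a subclass might drive the delta handling declared above; the method names are those defined in LuceneBase2, while the subclass and the surrounding transaction protocol are simplified assumptions:

    // Illustrative subclass; document creation and the commit protocol are elided.
    public class SketchIndexer extends LuceneBase2
    {
        public void indexInTransaction(StoreRef storeRef, String txnId) throws Exception
        {
            initialise(storeRef, txnId, false, true);  // marks the delta ACTIVE in IndexInfo

            IndexWriter writer = getDeltaWriter();     // writer over the transactional delta
            // ... add Lucene Documents for created/updated nodes here ...
            saveDelta();                               // close reader/writer, flush to disk
            setStatus(TransactionStatus.PREPARED);     // then COMMITTED when the txn completes
        }
    }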
@@ -0,0 +1,672 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *     http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Random;

import javax.transaction.UserTransaction;

import junit.framework.TestCase;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.dictionary.DictionaryDAO;
import org.alfresco.repo.dictionary.M2Aspect;
import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.dictionary.M2Property;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.DynamicNamespacePrefixResolver;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;

public class LuceneCategoryTest2 extends TestCase
{
    private ServiceRegistry serviceRegistry;

    static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();

    NodeService nodeService;
    DictionaryService dictionaryService;
    LuceneIndexLock luceneIndexLock;
    private NodeRef rootNodeRef;
    private NodeRef n1;
    private NodeRef n2;
    private NodeRef n3;
    private NodeRef n4;
    private NodeRef n6;
    private NodeRef n5;
    private NodeRef n7;
    private NodeRef n8;
    private NodeRef n9;
    private NodeRef n10;
    private NodeRef n11;
    private NodeRef n12;
    private NodeRef n13;
    private NodeRef n14;

    private NodeRef catContainer;
    private NodeRef catRoot;
    private NodeRef catACBase;
    private NodeRef catACOne;
    private NodeRef catACTwo;
    private NodeRef catACThree;
    private FullTextSearchIndexer luceneFTS;
    private DictionaryDAO dictionaryDAO;
    private String TEST_NAMESPACE = "http://www.alfresco.org/test/lucenecategorytest";
    private QName regionCategorisationQName;
    private QName assetClassCategorisationQName;
    private QName investmentRegionCategorisationQName;
    private QName marketingRegionCategorisationQName;
    private NodeRef catRBase;
    private NodeRef catROne;
    private NodeRef catRTwo;
    private NodeRef catRThree;
    private SearchService searcher;
    private LuceneIndexerAndSearcher indexerAndSearcher;

    private CategoryService categoryService;

    public LuceneCategoryTest2()
    {
        super();
    }

    public LuceneCategoryTest2(String arg0)
    {
        super(arg0);
    }

    public void setUp() throws Exception
    {
        nodeService = (NodeService) ctx.getBean("dbNodeService");
        luceneIndexLock = (LuceneIndexLock) ctx.getBean("luceneIndexLock");
        dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
        luceneFTS = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
        dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
        searcher = (SearchService) ctx.getBean("searchService");
        indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
        categoryService = (CategoryService) ctx.getBean("categoryService");
        serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);

        createTestTypes();

        TransactionService transactionService = serviceRegistry.getTransactionService();
        UserTransaction tx = transactionService.getUserTransaction();
        tx.begin();

        StoreRef storeRef = nodeService.createStore(
                StoreRef.PROTOCOL_WORKSPACE,
                "Test_" + System.currentTimeMillis());
        rootNodeRef = nodeService.getRootNode(storeRef);

        n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_CONTAINER).getChildRef();
        nodeService.setProperty(n1, QName.createQName("{namespace}property-1"), "value-1");
        n2 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}two"), ContentModel.TYPE_CONTAINER).getChildRef();
        nodeService.setProperty(n2, QName.createQName("{namespace}property-1"), "value-1");
        nodeService.setProperty(n2, QName.createQName("{namespace}property-2"), "value-2");
        n3 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}three"), ContentModel.TYPE_CONTAINER).getChildRef();
        n4 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}four"), ContentModel.TYPE_CONTAINER).getChildRef();
        n5 = nodeService.createNode(n1, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}five"), ContentModel.TYPE_CONTAINER).getChildRef();
        n6 = nodeService.createNode(n1, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}six"), ContentModel.TYPE_CONTAINER).getChildRef();
        n7 = nodeService.createNode(n2, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}seven"), ContentModel.TYPE_CONTAINER).getChildRef();
        n8 = nodeService.createNode(n2, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-2"), ContentModel.TYPE_CONTAINER).getChildRef();
        n9 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}nine"), ContentModel.TYPE_CONTAINER).getChildRef();
        n10 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}ten"), ContentModel.TYPE_CONTAINER).getChildRef();
        n11 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eleven"), ContentModel.TYPE_CONTAINER).getChildRef();
        n12 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}twelve"), ContentModel.TYPE_CONTAINER).getChildRef();
        n13 = nodeService.createNode(n12, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}thirteen"), ContentModel.TYPE_CONTAINER).getChildRef();
        n14 = nodeService.createNode(n13, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}fourteen"), ContentModel.TYPE_CONTAINER).getChildRef();

        nodeService.addChild(rootNodeRef, n8, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-0"));
        nodeService.addChild(n1, n8, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-1"));
        nodeService.addChild(n2, n13, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}link"));

        nodeService.addChild(n1, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
        nodeService.addChild(n2, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
        nodeService.addChild(n5, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
        nodeService.addChild(n6, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
        nodeService.addChild(n12, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
        nodeService.addChild(n13, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));

        // Categories

        catContainer = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categoryContainer"), ContentModel.TYPE_CONTAINER).getChildRef();
        catRoot = nodeService.createNode(catContainer, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categoryRoot"), ContentModel.TYPE_CATEGORYROOT).getChildRef();

        catRBase = nodeService.createNode(catRoot, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "Region"), ContentModel.TYPE_CATEGORY).getChildRef();
        catROne = nodeService.createNode(catRBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Europe"), ContentModel.TYPE_CATEGORY).getChildRef();
        catRTwo = nodeService.createNode(catRBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "RestOfWorld"), ContentModel.TYPE_CATEGORY).getChildRef();
        catRThree = nodeService.createNode(catRTwo, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "US"), ContentModel.TYPE_CATEGORY).getChildRef();

        nodeService.addChild(catRoot, catRBase, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "InvestmentRegion"));
        nodeService.addChild(catRoot, catRBase, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "MarketingRegion"));

        catACBase = nodeService.createNode(catRoot, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "AssetClass"), ContentModel.TYPE_CATEGORY).getChildRef();
        catACOne = nodeService.createNode(catACBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Fixed"), ContentModel.TYPE_CATEGORY).getChildRef();
        catACTwo = nodeService.createNode(catACBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Equity"), ContentModel.TYPE_CATEGORY).getChildRef();
        catACThree = nodeService.createNode(catACTwo, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "SpecialEquity"), ContentModel.TYPE_CATEGORY).getChildRef();

        nodeService.addAspect(n1, assetClassCategorisationQName, createMap("assetClass", catACBase));
        nodeService.addAspect(n1, regionCategorisationQName, createMap("region", catRBase));

        nodeService.addAspect(n2, assetClassCategorisationQName, createMap("assetClass", catACOne));
        nodeService.addAspect(n3, assetClassCategorisationQName, createMap("assetClass", catACOne));
        nodeService.addAspect(n4, assetClassCategorisationQName, createMap("assetClass", catACOne));
        nodeService.addAspect(n5, assetClassCategorisationQName, createMap("assetClass", catACOne));
        nodeService.addAspect(n6, assetClassCategorisationQName, createMap("assetClass", catACOne));

        nodeService.addAspect(n7, assetClassCategorisationQName, createMap("assetClass", catACTwo));
        nodeService.addAspect(n8, assetClassCategorisationQName, createMap("assetClass", catACTwo));
        nodeService.addAspect(n9, assetClassCategorisationQName, createMap("assetClass", catACTwo));
        nodeService.addAspect(n10, assetClassCategorisationQName, createMap("assetClass", catACTwo));
        nodeService.addAspect(n11, assetClassCategorisationQName, createMap("assetClass", catACTwo));

        nodeService.addAspect(n12, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo));
        nodeService.addAspect(n13, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo, catACThree));
        nodeService.addAspect(n14, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo));

        nodeService.addAspect(n2, regionCategorisationQName, createMap("region", catROne));
        nodeService.addAspect(n3, regionCategorisationQName, createMap("region", catROne));
        nodeService.addAspect(n4, regionCategorisationQName, createMap("region", catRTwo));
        nodeService.addAspect(n5, regionCategorisationQName, createMap("region", catRTwo));

        nodeService.addAspect(n5, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
        nodeService.addAspect(n5, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
        nodeService.addAspect(n6, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
        nodeService.addAspect(n7, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
        nodeService.addAspect(n8, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
        nodeService.addAspect(n9, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
        nodeService.addAspect(n10, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
        nodeService.addAspect(n11, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
        nodeService.addAspect(n12, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
        nodeService.addAspect(n13, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
        nodeService.addAspect(n14, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));

        tx.commit();
    }

    private HashMap<QName, Serializable> createMap(String name, NodeRef[] nodeRefs)
    {
        HashMap<QName, Serializable> map = new HashMap<QName, Serializable>();
        Serializable value = (Serializable) Arrays.asList(nodeRefs);
        map.put(QName.createQName(TEST_NAMESPACE, name), value);
        return map;
    }

    private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef)
    {
        return createMap(name, new NodeRef[]{nodeRef});
    }

    private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef1, NodeRef nodeRef2)
    {
        return createMap(name, new NodeRef[]{nodeRef1, nodeRef2});
    }

    private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef1, NodeRef nodeRef2, NodeRef nodeRef3)
    {
        return createMap(name, new NodeRef[]{nodeRef1, nodeRef2, nodeRef3});
    }

    private void createTestTypes()
    {
        M2Model model = M2Model.createModel("test:lucenecategory");
        model.createNamespace(TEST_NAMESPACE, "test");
        model.createImport(NamespaceService.DICTIONARY_MODEL_1_0_URI, NamespaceService.DICTIONARY_MODEL_PREFIX);
        model.createImport(NamespaceService.CONTENT_MODEL_1_0_URI, NamespaceService.CONTENT_MODEL_PREFIX);

        regionCategorisationQName = QName.createQName(TEST_NAMESPACE, "Region");
        M2Aspect generalCategorisation = model.createAspect("test:" + regionCategorisationQName.getLocalName());
        generalCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
        M2Property genCatProp = generalCategorisation.createProperty("test:region");
        genCatProp.setIndexed(true);
        genCatProp.setIndexedAtomically(true);
        genCatProp.setMandatory(true);
        genCatProp.setMultiValued(true);
        genCatProp.setStoredInIndex(true);
        genCatProp.setTokenisedInIndex(true);
        genCatProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());

        assetClassCategorisationQName = QName.createQName(TEST_NAMESPACE, "AssetClass");
        M2Aspect assetClassCategorisation = model.createAspect("test:" + assetClassCategorisationQName.getLocalName());
        assetClassCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
        M2Property acProp = assetClassCategorisation.createProperty("test:assetClass");
        acProp.setIndexed(true);
        acProp.setIndexedAtomically(true);
        acProp.setMandatory(true);
        acProp.setMultiValued(true);
        acProp.setStoredInIndex(true);
        acProp.setTokenisedInIndex(true);
        acProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());

        investmentRegionCategorisationQName = QName.createQName(TEST_NAMESPACE, "InvestmentRegion");
        M2Aspect investmentRegionCategorisation = model.createAspect("test:" + investmentRegionCategorisationQName.getLocalName());
        investmentRegionCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
        M2Property irProp = investmentRegionCategorisation.createProperty("test:investmentRegion");
        irProp.setIndexed(true);
        irProp.setIndexedAtomically(true);
        irProp.setMandatory(true);
        irProp.setMultiValued(true);
        irProp.setStoredInIndex(true);
        irProp.setTokenisedInIndex(true);
        irProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());

        marketingRegionCategorisationQName = QName.createQName(TEST_NAMESPACE, "MarketingRegion");
        M2Aspect marketingRegionCategorisation = model.createAspect("test:" + marketingRegionCategorisationQName.getLocalName());
        marketingRegionCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
        M2Property mrProp = marketingRegionCategorisation.createProperty("test:marketingRegion");
        mrProp.setIndexed(true);
        mrProp.setIndexedAtomically(true);
        mrProp.setMandatory(true);
        mrProp.setMultiValued(true);
        mrProp.setStoredInIndex(true);
        mrProp.setTokenisedInIndex(true);
        mrProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());

        dictionaryDAO.putModel(model);
    }

    private void buildBaseIndex()
    {
        LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
        indexer.setNodeService(nodeService);
        //indexer.setLuceneIndexLock(luceneIndexLock);
        indexer.setDictionaryService(dictionaryService);
        indexer.setLuceneFullTextSearchIndexer(luceneFTS);
        //indexer.clearIndex();
        indexer.createNode(new ChildAssociationRef(null, null, null, rootNodeRef));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}one"), n1));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}two"), n2));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}three"), n3));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}four"), n4));

        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}categoryContainer"), catContainer));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, catContainer, QName.createQName("{cat}categoryRoot"), catRoot));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, catRoot, QName.createQName(TEST_NAMESPACE, "AssetClass"), catACBase));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACBase, QName.createQName(TEST_NAMESPACE, "Fixed"), catACOne));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACBase, QName.createQName(TEST_NAMESPACE, "Equity"), catACTwo));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACTwo, QName.createQName(TEST_NAMESPACE, "SpecialEquity"), catACThree));

        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, catRoot, QName.createQName(TEST_NAMESPACE, "Region"), catRBase));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRBase, QName.createQName(TEST_NAMESPACE, "Europe"), catROne));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRBase, QName.createQName(TEST_NAMESPACE, "RestOfWorld"), catRTwo));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRTwo, QName.createQName(TEST_NAMESPACE, "US"), catRThree));

        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n1, QName.createQName("{namespace}five"), n5));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n1, QName.createQName("{namespace}six"), n6));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n2, QName.createQName("{namespace}seven"), n7));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n2, QName.createQName("{namespace}eight"), n8));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}nine"), n9));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}ten"), n10));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}eleven"), n11));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}twelve"), n12));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n12, QName.createQName("{namespace}thirteen"), n13));
        indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n13, QName.createQName("{namespace}fourteen"), n14));
        indexer.prepare();
        indexer.commit();
    }

    public void testMulti() throws Exception
    {
        TransactionService transactionService = serviceRegistry.getTransactionService();
        UserTransaction tx = transactionService.getUserTransaction();
        tx.begin();
        buildBaseIndex();

        LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);

        searcher.setNodeService(nodeService);
        searcher.setDictionaryService(dictionaryService);
        searcher.setNamespacePrefixResolver(getNamespacePrefixResolver(""));
        ResultSet results;

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"//*\" AND (PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:MarketingRegion/member\")", null, null);
        //printPaths(results);
        assertEquals(9, results.length());
        results.close();
        tx.rollback();
    }

    public void testBasic() throws Exception
    {
        TransactionService transactionService = serviceRegistry.getTransactionService();
        UserTransaction tx = transactionService.getUserTransaction();
        tx.begin();
        buildBaseIndex();

        LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);

        searcher.setNodeService(nodeService);
        searcher.setDictionaryService(dictionaryService);
        searcher.setNamespacePrefixResolver(getNamespacePrefixResolver(""));
        ResultSet results;

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion\"", null, null);
        //printPaths(results);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion//member\"", null, null);
        //printPaths(results);
        assertEquals(6, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/member\" ", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Fixed\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Equity\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Fixed\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:*\"", null, null);
        assertEquals(2, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass//test:*\"", null, null);
        assertEquals(3, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
        //printPaths(results);
        assertEquals(8, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\"", null, null);
        //printPaths(results);
        assertEquals(8, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member//.\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member//*\"", null, null);
        assertEquals(0, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member\"", null, null);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/test:AssetClass/test:Equity/member\" AND +PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
        //printPaths(results);
        assertEquals(3, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
        //printPaths(results);
        assertEquals(13, results.length());
        results.close();

        // Region

        assertEquals(4, nodeService.getChildAssocs(catRoot).size());

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region\"", null, null);
        //printPaths(results);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/member\"", null, null);
        //printPaths(results);
        assertEquals(1, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/test:Europe/member\"", null, null);
        //printPaths(results);
        assertEquals(2, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/test:RestOfWorld/member\"", null, null);
        //printPaths(results);
        assertEquals(2, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region//member\"", null, null);
        //printPaths(results);
        assertEquals(5, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:InvestmentRegion//member\"", null, null);
        //printPaths(results);
        assertEquals(5, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion//member\"", null, null);
        //printPaths(results);
        assertEquals(6, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/test:AssetClass/test:Fixed/member\" AND +PATH:\"/test:Region/test:Europe/member\"", null, null);
        //printPaths(results);
        assertEquals(2, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Fixed/member\" AND +PATH:\"/cm:categoryContainer/cm:categoryRoot/test:Region/test:Europe/member\"", null, null);
        //printPaths(results);
        assertEquals(2, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:MarketingRegion/member\"", null, null);
        //printPaths(results);
        assertEquals(9, results.length());
        results.close();
        tx.rollback();
    }

    public void testCategoryServiceImpl() throws Exception
    {
        TransactionService transactionService = serviceRegistry.getTransactionService();
        UserTransaction tx = transactionService.getUserTransaction();
        tx.begin();
        buildBaseIndex();

        LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);

        searcher.setNodeService(nodeService);
        searcher.setDictionaryService(dictionaryService);
        searcher.setNamespacePrefixResolver(getNamespacePrefixResolver(""));

        ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/*\" ", null, null);
        assertEquals(3, results.length());
        results.close();

        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/member\" ", null, null);
        assertEquals(1, results.length());
        results.close();

        LuceneCategoryServiceImpl impl = new LuceneCategoryServiceImpl();
        impl.setNodeService(nodeService);
        impl.setNamespacePrefixResolver(getNamespacePrefixResolver(""));
        impl.setIndexerAndSearcher(indexerAndSearcher);
        impl.setDictionaryService(dictionaryService);

        Collection<ChildAssociationRef> result = impl.getChildren(catACBase, CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE);
        assertEquals(1, result.size());

        result = impl.getChildren(catACBase, CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE);
        assertEquals(3, result.size());

        result = impl.getChildren(catACBase, CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE);
        assertEquals(2, result.size());

        result = impl.getChildren(catACBase, CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY);
        assertEquals(18, result.size());

        result = impl.getChildren(catACBase, CategoryService.Mode.ALL, CategoryService.Depth.ANY);
        assertEquals(21, result.size());

        result = impl.getChildren(catACBase, CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY);
        assertEquals(3, result.size());

        result = impl.getClassifications(rootNodeRef.getStoreRef());
        assertEquals(4, result.size());

        result = impl.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE);
        assertEquals(2, result.size());

        Collection<QName> aspects = impl.getClassificationAspects();
        assertEquals(6, aspects.size());

        tx.rollback();
    }

    private NamespacePrefixResolver getNamespacePrefixResolver(String defaultURI)
    {
        DynamicNamespacePrefixResolver nspr = new DynamicNamespacePrefixResolver(null);
        nspr.registerNamespace(NamespaceService.CONTENT_MODEL_PREFIX, NamespaceService.CONTENT_MODEL_1_0_URI);
        nspr.registerNamespace("namespace", "namespace");
        nspr.registerNamespace("test", TEST_NAMESPACE);
        nspr.registerNamespace(NamespaceService.DEFAULT_PREFIX, defaultURI);
        return nspr;
    }

    public void testCategoryService() throws Exception
    {
        TransactionService transactionService = serviceRegistry.getTransactionService();
        UserTransaction tx = transactionService.getUserTransaction();
        tx.begin();
        buildBaseIndex();
        assertEquals(1, categoryService.getChildren(catACBase, CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE).size());
        assertEquals(2, categoryService.getChildren(catACBase, CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE).size());
        assertEquals(3, categoryService.getChildren(catACBase, CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE).size());
        assertEquals(18, categoryService.getChildren(catACBase, CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY).size());
        assertEquals(3, categoryService.getChildren(catACBase, CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY).size());
        assertEquals(21, categoryService.getChildren(catACBase, CategoryService.Mode.ALL, CategoryService.Depth.ANY).size());
        assertEquals(4, categoryService.getClassifications(rootNodeRef.getStoreRef()).size());
        assertEquals(2, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
        assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
        assertEquals(6, categoryService.getClassificationAspects().size());
        assertEquals(2, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());

        NodeRef newRoot = categoryService.createRootCategory(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), "Fruit");
        tx.commit();
        tx = transactionService.getUserTransaction();
        tx.begin();
        assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
        assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
        assertEquals(4, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());

        NodeRef newCat = categoryService.createCategory(newRoot, "Banana");
        tx.commit();
        tx = transactionService.getUserTransaction();
        tx.begin();
        assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
        assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
        assertEquals(5, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());

        categoryService.deleteCategory(newCat);
        tx.commit();
        tx = transactionService.getUserTransaction();
        tx.begin();
        assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
        assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
        assertEquals(4, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());

        categoryService.deleteCategory(newRoot);
        tx.commit();
        tx = transactionService.getUserTransaction();
        tx.begin();
        assertEquals(2, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
        assertEquals(2, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
        assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());

        tx.rollback();
    }

    private int getTotalScore(ResultSet results)
    {
        int totalScore = 0;
        for (ResultSetRow row : results)
        {
            totalScore += row.getScore();
        }
        return totalScore;
    }
}
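The PATH assertions above lean on Alfresco's XPath-like PATH field; a compact sketch of the query pattern the tests repeat, with an illustrative store and no assumptions about result counts:

    // Hedged sketch of the query pattern exercised above.
    SearchService searchService = (SearchService) ctx.getBean("searchService");
    ResultSet results = searchService.query(
            new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore"),
            "lucene",
            "PATH:\"/test:AssetClass/test:Equity/member\"");   // members of a category
    try
    {
        for (ResultSetRow row : results)
        {
            System.out.println(row.getNodeRef());
        }
    }
    finally
    {
        results.close();   // result sets hold index readers and must be closed
    }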
@@ -19,6 +19,7 @@ package org.alfresco.repo.search.impl.lucene;
 import java.util.Set;
 
 import org.alfresco.repo.search.Indexer;
+import org.alfresco.repo.search.IndexerSPI;
 import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
 import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
 import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -28,7 +29,7 @@ import org.alfresco.service.cmr.repository.NodeService;
 /**
  * @author Andy Hind
  */
-public interface LuceneIndexer extends Indexer, Lockable
+public interface LuceneIndexer extends IndexerSPI, Lockable
 {
 
     public void commit();
@@ -39,9 +40,6 @@ public interface LuceneIndexer extends Indexer, Lockable
     public void setDictionaryService(DictionaryService dictionaryService);
     public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer);
-
-    public void updateFullTextSearch(int size);
-    public void registerCallBack(FTSIndexerAware indexer);
 
     public String getDeltaId();
     public void flushPending() throws LuceneIndexException;
     public Set<NodeRef> getDeletions();
@@ -0,0 +1,47 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import java.util.Set;

import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

/**
 * @author Andy Hind
 */
public interface LuceneIndexer2 extends IndexerSPI
{

    public void commit();
    public void rollback();
    public int prepare();
    public boolean isModified();
    public void setNodeService(NodeService nodeService);
    public void setDictionaryService(DictionaryService dictionaryService);
    public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer);

    public String getDeltaId();
    public void flushPending() throws LuceneIndexException;
    public Set<NodeRef> getDeletions();
    public boolean getDeleteOnlyNodes();
}
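For orientation, a minimal sketch of driving an indexer through the two-phase lifecycle this interface exposes; obtainIndexer() is a hypothetical stand-in for however an instance is acquired, and this is an illustration, not code from the repository:

    LuceneIndexer2 indexer = obtainIndexer();   // hypothetical factory call
    try
    {
        indexer.flushPending();                 // push buffered work into the delta index
        if (indexer.isModified())
        {
            indexer.prepare();                  // phase one: stage the delta
            indexer.commit();                   // phase two: make it visible to searchers
        }
    }
    catch (LuceneIndexException e)
    {
        indexer.rollback();                     // discard the delta on failure
    }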
@@ -135,6 +135,9 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher

    private QueryRegisterComponent queryRegister;

    /** the maximum transformation time to allow atomically, defaulting to 20ms */
    private long maxAtomicTransformationTime = 20;

    private int indexerMaxFieldLength;

    /**
@@ -187,6 +190,18 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        this.queryRegister = queryRegister;
    }

    /**
     * Set the maximum average transformation time allowed to a transformer in order to have
     * the transformation performed in the current transaction. The default is 20ms.
     *
     * @param maxAtomicTransformationTime the maximum average time that a text transformation may
     *      take in order to be performed atomically.
     */
    public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
    {
        this.maxAtomicTransformationTime = maxAtomicTransformationTime;
    }
    /**
     * Check if we are in a global transaction according to the transaction
     * manager
@@ -345,6 +360,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        indexer.setLuceneIndexLock(luceneIndexLock);
        indexer.setLuceneFullTextSearchIndexer(luceneFullTextSearchIndexer);
        indexer.setContentService(contentService);
        indexer.setMaxAtomicTransformationTime(maxAtomicTransformationTime);
        return indexer;
    }
File diff suppressed because it is too large
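As a sketch of how the new threshold is used, a minimal illustration assuming the factory can be instantiated and configured directly (in the repository it is normally wired up by the application context):

    // Illustrative only: transformations whose average reported time exceeds the
    // threshold are deferred to the background full text search indexer.
    LuceneIndexerAndSearcherFactory factory = new LuceneIndexerAndSearcherFactory();
    factory.setMaxAtomicTransformationTime(100);   // allow up to 100ms in-transaction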
@@ -38,6 +38,7 @@ import javax.transaction.xa.XAResource;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.transform.ContentTransformer;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
@@ -53,7 +54,6 @@ import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NoTransformerException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
@@ -110,6 +110,9 @@ public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer
     * Content service to get content for indexing.
     */
    private ContentService contentService;

    /** the maximum transformation time to allow atomically, defaulting to 20ms */
    private long maxAtomicTransformationTime = 20;

    /**
     * A list of all deletions we have made - at merge these deletions need to be made against the main index.
@@ -196,9 +199,14 @@ public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer
        this.contentService = contentService;
    }

    /*******************************************************************************************************************
     * * Indexer Implementation * **************************
     */
    public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
    {
        this.maxAtomicTransformationTime = maxAtomicTransformationTime;
    }

    /*===========================
     * Indexer Implementation
     ============================*/

    /**
     * Utility method to check we are in the correct state to do work. Also keeps track of the dirty flag.
@@ -1177,19 +1185,20 @@ public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer
        for (QName propertyName : properties.keySet())
        {
            Serializable value = properties.get(propertyName);
            isAtomic = indexProperty(nodeRef, propertyName, value, xdoc, isAtomic, true);
            if (indexAllProperties)
            {
                indexProperty(nodeRef, propertyName, value, xdoc, false, false);
                indexProperty(nodeRef, propertyName, value, xdoc, false);
            }
            else
            {
                isAtomic &= indexProperty(nodeRef, propertyName, value, xdoc, true);
            }
        }

        boolean isRoot = nodeRef.equals(nodeService.getRootNode(nodeRef.getStoreRef()));

        StringBuilder parentBuffer = new StringBuilder();
        StringBuilder qNameBuffer = new StringBuilder(64);

        int containerCount = 0;
        for (Iterator<Pair<Path, QName>> it = paths.iterator(); it.hasNext(); /**/)
        {
            Pair<Path, QName> pair = it.next();
@@ -1361,11 +1370,19 @@ public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer
        }
    }

    private boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc,
            boolean isAtomic, boolean indexAtomicProperties)
    /**
     * @param indexAtomicPropertiesOnly true to ignore all properties that must be indexed
     *      non-atomically
     * @return Returns true if the property was indexed atomically, or false if it
     *      should be done asynchronously
     */
    private boolean indexProperty(
            NodeRef nodeRef, QName propertyName, Serializable value, Document doc,
            boolean indexAtomicPropertiesOnly)
    {
        String attributeName = "@"
                + QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName()));
        String attributeName = "@" + QName.createQName(
                propertyName.getNamespaceURI(),
                ISO9075.encode(propertyName.getLocalName()));

        boolean store = true;
        boolean index = true;
@@ -1382,140 +1399,166 @@ public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer
            atomic = propertyDef.isIndexedAtomically();
            isContent = propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT);
        }
        isAtomic &= atomic;

        if (value != null)
        if (value == null)
        {
            if (indexAtomicProperties == atomic)
            // the value is null
            return true;
        }
        else if (indexAtomicPropertiesOnly && !atomic)
        {
            // we are only doing atomic properties and the property is definitely non-atomic
            return false;
        }

        if (!indexAtomicPropertiesOnly)
        {
            doc.removeFields(propertyName.toString());
        }
        boolean wereAllAtomic = true;
        // convert value to String
        for (String strValue : DefaultTypeConverter.INSTANCE.getCollection(String.class, value))
        {
            if (strValue == null)
            {
                if (!indexAtomicProperties)
                // nothing to index
                continue;
            }
            // String strValue = ValueConverter.convert(String.class, value);
            // TODO: Need to add with the correct language based analyser

            if (isContent)
            {
                ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
                if (!index || contentData.getMimetype() == null)
                {
                    doc.removeFields(propertyName.toString());
                    // no mimetype or property not indexed
                    continue;
                }
                // convert value to String
                for (String strValue : DefaultTypeConverter.INSTANCE.getCollection(String.class, value))
                // store mimetype in index - even if content does not index it is useful
                doc.add(new Field(
                        attributeName + ".mimetype",
                        contentData.getMimetype(),
                        false, true, false));

                ContentReader reader = contentService.getReader(nodeRef, propertyName);
                if (reader != null && reader.exists())
                {
                    if (strValue != null)
                    boolean readerReady = true;
                    // transform if necessary (it is not a UTF-8 text document)
                    if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN)
                            || !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
                    {
                        // String strValue = ValueConverter.convert(String.class, value);
                        // TODO: Need to add with the correct language based analyser

                        if (isContent)
                        // get the transformer
                        ContentTransformer transformer = contentService.getTransformer(
                                reader.getMimetype(),
                                MimetypeMap.MIMETYPE_TEXT_PLAIN);
                        // is this transformer good enough?
                        if (transformer == null)
                        {
                            ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
                            if (contentData.getMimetype() != null && index)
                            // log it
                            if (s_logger.isDebugEnabled())
                            {
                                // store mimetype in index - even if content does not index it is useful
                                doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), false, true,
                                        false));

                                ContentReader reader = contentService.getReader(nodeRef, propertyName);
                                if (reader != null && reader.exists())
                                {
                                    boolean readerReady = true;
                                    // transform if necessary (it is not a UTF-8
                                    // text document)
                                    if (!EqualsHelper.nullSafeEquals(reader.getMimetype(),
                                            MimetypeMap.MIMETYPE_TEXT_PLAIN)
                                            || !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
                                    {
                                        ContentWriter writer = contentService.getTempWriter();
                                        writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
                                        // this is what the analyzers expect on the stream
                                        writer.setEncoding("UTF-8");
                                        try
                                        {
                                            contentService.transform(reader, writer);
                                            // point the reader to the new-written content
                                            reader = writer.getReader();
                                        }
                                        catch (NoTransformerException e)
                                        {
                                            // log it
                                            if (s_logger.isDebugEnabled())
                                            {
                                                s_logger.debug("Not indexed: No transformation", e);
                                            }
                                            // don't index from the reader
                                            readerReady = false;
                                            // not indexed: no transformation
                                            doc.add(Field.Text("TEXT", NOT_INDEXED_NO_TRANSFORMATION));
                                            doc.add(Field.Text(attributeName, NOT_INDEXED_NO_TRANSFORMATION));
                                        }
                                        catch (ContentIOException e)
                                        {
                                            // log it
                                            if (s_logger.isDebugEnabled())
                                            {
                                                s_logger.debug("Not indexed: Transformation failed", e);
                                            }
                                            // don't index from the reader
                                            readerReady = false;
                                            // not indexed: transformation failed
                                            doc.add(Field.Text("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED));
                                            doc.add(Field.Text(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED));
                                        }
                                    }
                                    // add the text field using the stream from the
                                    // reader, but only if the reader is valid
                                    if (readerReady)
                                    {
                                        InputStreamReader isr = null;
                                        InputStream ris = reader.getContentInputStream();
                                        try
                                        {
                                            isr = new InputStreamReader(ris, "UTF-8");
                                        }
                                        catch (UnsupportedEncodingException e)
                                        {
                                            isr = new InputStreamReader(ris);
                                        }
                                        doc.add(Field.Text("TEXT", isr));

                                        ris = reader.getReader().getContentInputStream();
                                        try
                                        {
                                            isr = new InputStreamReader(ris, "UTF-8");
                                        }
                                        catch (UnsupportedEncodingException e)
                                        {
                                            isr = new InputStreamReader(ris);
                                        }

                                        doc.add(Field.Text("@"
                                                + QName.createQName(propertyName.getNamespaceURI(), ISO9075
                                                        .encode(propertyName.getLocalName())), isr));
                                    }
                                }
                                else
                                // URL not present (null reader) or no content at the URL (file missing)
                                {
                                    // log it
                                    if (s_logger.isDebugEnabled())
                                    {
                                        s_logger.debug("Not indexed: Content Missing \n"
                                                + "   node: " + nodeRef + "\n" + "   reader: " + reader + "\n"
                                                + "   content exists: "
                                                + (reader == null ? " --- " : Boolean.toString(reader.exists())));
                                    }
                                    // not indexed: content missing
                                    doc.add(Field.Text("TEXT", NOT_INDEXED_CONTENT_MISSING));
                                    doc.add(Field.Text(attributeName, NOT_INDEXED_CONTENT_MISSING));
                                }
                                s_logger.debug(
                                        "Not indexed: No transformation: \n" +
                                        "   source: " + reader + "\n" +
                                        "   target: " + MimetypeMap.MIMETYPE_TEXT_PLAIN);
                            }
                            // don't index from the reader
                            readerReady = false;
                            // not indexed: no transformation
                            doc.add(Field.Text("TEXT", NOT_INDEXED_NO_TRANSFORMATION));
                            doc.add(Field.Text(attributeName, NOT_INDEXED_NO_TRANSFORMATION));
                        }
                        else if (indexAtomicPropertiesOnly && transformer.getTransformationTime() > maxAtomicTransformationTime)
                        {
                            // only indexing atomic properties
                            // indexing will take too long, so push it to the background
                            wereAllAtomic = false;
                        }
                        else
                        {
                            doc.add(new Field(attributeName, strValue, store, index, tokenise));
                            // We have a transformer that is fast enough
                            ContentWriter writer = contentService.getTempWriter();
                            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
                            // this is what the analyzers expect on the stream
                            writer.setEncoding("UTF-8");
                            try
                            {
                                transformer.transform(reader, writer);
                                // point the reader to the new-written content
                                reader = writer.getReader();
                            }
                            catch (ContentIOException e)
                            {
                                // log it
                                if (s_logger.isDebugEnabled())
                                {
                                    s_logger.debug("Not indexed: Transformation failed", e);
                                }
                                // don't index from the reader
                                readerReady = false;
                                // not indexed: transformation failed
                                doc.add(Field.Text("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED));
                                doc.add(Field.Text(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED));
                            }
                        }
                    }
                    // add the text field using the stream from the
                    // reader, but only if the reader is valid
                    if (readerReady)
                    {
                        InputStreamReader isr = null;
                        InputStream ris = reader.getContentInputStream();
                        try
                        {
                            isr = new InputStreamReader(ris, "UTF-8");
                        }
                        catch (UnsupportedEncodingException e)
                        {
                            isr = new InputStreamReader(ris);
                        }
                        doc.add(Field.Text("TEXT", isr));

                        ris = reader.getReader().getContentInputStream();
                        try
                        {
                            isr = new InputStreamReader(ris, "UTF-8");
                        }
                        catch (UnsupportedEncodingException e)
                        {
                            isr = new InputStreamReader(ris);
                        }

                        doc.add(Field.Text("@" + QName.createQName(
                                propertyName.getNamespaceURI(),
                                ISO9075.encode(propertyName.getLocalName())), isr));
                    }
                }
                else
                // URL not present (null reader) or no content at the URL (file missing)
                {
                    // log it
                    if (s_logger.isDebugEnabled())
                    {
                        s_logger.debug("Not indexed: Content Missing \n"
                                + "   node: " + nodeRef + "\n" + "   reader: " + reader + "\n"
                                + "   content exists: "
                                + (reader == null ? " --- " : Boolean.toString(reader.exists())));
                    }
                    // not indexed: content missing
                    doc.add(Field.Text("TEXT", NOT_INDEXED_CONTENT_MISSING));
                    doc.add(Field.Text(attributeName, NOT_INDEXED_CONTENT_MISSING));
                }
            }
            else
            {
                doc.add(new Field(attributeName, strValue, store, index, tokenise));
            }
        }

        return isAtomic;
        return wereAllAtomic;
    }

    private Map<ChildAssociationRef, Counter> getNodeCounts(NodeRef nodeRef)
File diff suppressed because it is too large
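The key behavioural change in the indexProperty rewrite is the decision between in-transaction and background indexing of content. A condensed sketch of that decision, under the assumption that the helper signature below is invented for illustration and the surrounding plumbing is elided:

    // Illustrative condensation of the branch shown above, not the full method.
    boolean indexContentProperty(ContentTransformer transformer, boolean indexAtomicPropertiesOnly)
    {
        if (transformer == null)
        {
            // no transformer: record NOT_INDEXED_NO_TRANSFORMATION markers instead of text
            return true;
        }
        else if (indexAtomicPropertiesOnly
                && transformer.getTransformationTime() > maxAtomicTransformationTime)
        {
            // too slow to run in-transaction: defer to the background FTS indexer
            return false;               // i.e. wereAllAtomic = false
        }
        else
        {
            // fast enough: transform to UTF-8 plain text and index now
            return true;
        }
    }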
@@ -0,0 +1,28 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespacePrefixResolver;

public interface LuceneSearcher2 extends SearchService
{
    public boolean indexExists();
    public void setNodeService(NodeService nodeService);
    public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver);
}
@@ -0,0 +1,654 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;

import org.alfresco.repo.search.CannedQueryDef;
import org.alfresco.repo.search.EmptyResultSet;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.NodeSearcher;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.XPathException;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.search.QueryParameter;
import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ISO9075;
import org.alfresco.util.SearchLanguageConversion;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.saxpath.SAXPathException;

import com.werken.saxpath.XPathReader;

/**
 * The Lucene implementation of Searcher. At the moment we support only
 * Lucene-based queries.
 *
 * TODO: Support for other query languages
 *
 * @author andyh
 *
 */
public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
{

    /**
     * Default field name
     */
    private static final String DEFAULT_FIELD = "TEXT";

    private NamespacePrefixResolver namespacePrefixResolver;

    private NodeService nodeService;

    private DictionaryService dictionaryService;

    private QueryRegisterComponent queryRegister;

    private LuceneIndexer2 indexer;

    /*
     * Searcher implementation
     */
    /**
     * Get an initialised searcher for the store and transaction. Normally we do
     * not search against a store and delta. Currently only gets the searcher
     * against the main index.
     *
     * @param storeRef
     * @param indexer
     * @param config
     * @return
     */
    public static LuceneSearcherImpl2 getSearcher(StoreRef storeRef, LuceneIndexer2 indexer, LuceneConfig config)
    {
        LuceneSearcherImpl2 searcher = new LuceneSearcherImpl2();
        searcher.setLuceneConfig(config);
        try
        {
            searcher.initialise(storeRef, indexer == null ? null : indexer.getDeltaId(), false, false);
            searcher.indexer = indexer;
        }
        catch (LuceneIndexException e)
        {
            throw new SearcherException(e);
        }
        return searcher;
    }

    /**
     * Get an initialised searcher for the store. No transactional amendments
     * are searched.
     *
     * @param storeRef
     * @return
     */
    public static LuceneSearcherImpl2 getSearcher(StoreRef storeRef, LuceneConfig config)
    {
        return getSearcher(storeRef, null, config);
    }

    public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver)
    {
        this.namespacePrefixResolver = namespacePrefixResolver;
    }

    public boolean indexExists()
    {
        //return mainIndexExists();
        return true;
    }
    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public void setDictionaryService(DictionaryService dictionaryService)
    {
        this.dictionaryService = dictionaryService;
    }

    public void setQueryRegister(QueryRegisterComponent queryRegister)
    {
        this.queryRegister = queryRegister;
    }

    public ResultSet query(StoreRef store, String language, String queryString, Path[] queryOptions,
            QueryParameterDefinition[] queryParameterDefinitions) throws SearcherException
    {
        SearchParameters sp = new SearchParameters();
        sp.addStore(store);
        sp.setLanguage(language);
        sp.setQuery(queryString);
        if (queryOptions != null)
        {
            for (Path path : queryOptions)
            {
                sp.addAttrbutePath(path);
            }
        }
        if (queryParameterDefinitions != null)
        {
            for (QueryParameterDefinition qpd : queryParameterDefinitions)
            {
                sp.addQueryParameterDefinition(qpd);
            }
        }
        sp.excludeDataInTheCurrentTransaction(true);

        return query(sp);
    }

    public ResultSet query(SearchParameters searchParameters)
    {
        if (searchParameters.getStores().size() != 1)
        {
            throw new IllegalStateException("Only one store can be searched at present");
        }

        String parameterisedQueryString;
        if (searchParameters.getQueryParameterDefinitions().size() > 0)
        {
            Map<QName, QueryParameterDefinition> map = new HashMap<QName, QueryParameterDefinition>();

            for (QueryParameterDefinition qpd : searchParameters.getQueryParameterDefinitions())
            {
                map.put(qpd.getQName(), qpd);
            }

            parameterisedQueryString = parameterise(searchParameters.getQuery(), map, null, namespacePrefixResolver);
        }
        else
        {
            parameterisedQueryString = searchParameters.getQuery();
        }

        if (searchParameters.getLanguage().equalsIgnoreCase(SearchService.LANGUAGE_LUCENE))
        {
            try
            {
                int defaultOperator;
                if (searchParameters.getDefaultOperator() == SearchParameters.AND)
                {
                    defaultOperator = LuceneQueryParser.DEFAULT_OPERATOR_AND;
                }
                else
                {
                    defaultOperator = LuceneQueryParser.DEFAULT_OPERATOR_OR;
                }

                Query query = LuceneQueryParser.parse(parameterisedQueryString, DEFAULT_FIELD, new LuceneAnalyser(
                        dictionaryService), namespacePrefixResolver, dictionaryService, defaultOperator);
                Searcher searcher = getSearcher(indexer);
                if (searcher == null)
                {
                    // no index: return an empty result set
                    return new EmptyResultSet();
                }

                Hits hits;

                if (searchParameters.getSortDefinitions().size() > 0)
                {
                    int index = 0;
                    SortField[] fields = new SortField[searchParameters.getSortDefinitions().size()];
                    for (SearchParameters.SortDefinition sd : searchParameters.getSortDefinitions())
                    {
                        switch (sd.getSortType())
                        {
                        case FIELD:
                            fields[index++] = new SortField(sd.getField(), !sd.isAscending());
                            break;
                        case DOCUMENT:
                            fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
                            break;
                        case SCORE:
                            fields[index++] = new SortField(null, SortField.SCORE, !sd.isAscending());
                            break;
                        }
                    }
                    hits = searcher.search(query, new Sort(fields));
                }
                else
                {
                    hits = searcher.search(query);
                }

                return new LuceneResultSet(hits, searcher, nodeService, searchParameters.getAttributePaths().toArray(
                        new Path[0]), searchParameters);
            }
            catch (ParseException e)
            {
                throw new SearcherException("Failed to parse query: " + parameterisedQueryString, e);
            }
            catch (IOException e)
            {
                throw new SearcherException("IO exception during search", e);
            }
        }
        else if (searchParameters.getLanguage().equalsIgnoreCase(SearchService.LANGUAGE_XPATH))
        {
            try
            {
                XPathReader reader = new XPathReader();
                LuceneXPathHandler handler = new LuceneXPathHandler();
                handler.setNamespacePrefixResolver(namespacePrefixResolver);
                handler.setDictionaryService(dictionaryService);
                // TODO: Handler should have the query parameters to use in
                // building its lucene query
                // At the moment xpath style parameters in the PATH
                // expression are not supported.
                reader.setXPathHandler(handler);
                reader.parse(parameterisedQueryString);
                Query query = handler.getQuery();
                Searcher searcher = getSearcher(null);
                if (searcher == null)
                {
                    // no index: return an empty result set
                    return new EmptyResultSet();
                }
                Hits hits = searcher.search(query);
                return new LuceneResultSet(hits, searcher, nodeService, searchParameters.getAttributePaths().toArray(
                        new Path[0]), searchParameters);
            }
            catch (SAXPathException e)
            {
                throw new SearcherException("Failed to parse query: " + searchParameters.getQuery(), e);
            }
            catch (IOException e)
            {
                throw new SearcherException("IO exception during search", e);
            }
        }
        else
        {
            throw new SearcherException("Unknown query language: " + searchParameters.getLanguage());
        }
    }

    public ResultSet query(StoreRef store, String language, String query)
    {
        return query(store, language, query, null, null);
    }

    public ResultSet query(StoreRef store, String language, String query,
            QueryParameterDefinition[] queryParameterDefinitions)
    {
        return query(store, language, query, null, queryParameterDefinitions);
    }

    public ResultSet query(StoreRef store, String language, String query, Path[] attributePaths)
    {
        return query(store, language, query, attributePaths, null);
    }
    public ResultSet query(StoreRef store, QName queryId, QueryParameter[] queryParameters)
    {
        CannedQueryDef definition = queryRegister.getQueryDefinition(queryId);

        // Do parameter replacement
        // As Lucene phrases are tokenised it is correct to just do straight
        // string replacement.
        // The string will be formatted by the tokeniser.
        //
        // For non-phrase queries this is incorrect, but string replacement is
        // probably the best we can do.
        // As numbers and text are indexed specially, direct term queries only
        // make sense against textual data

        checkParameters(definition, queryParameters);

        String queryString = parameterise(definition.getQuery(), definition.getQueryParameterMap(), queryParameters,
                definition.getNamespacePrefixResolver());

        return query(store, definition.getLanguage(), queryString, null, null);
    }

    /**
     * The definitions must provide a default value, or if not, there must be a
     * parameter to provide the value
     *
     * @param definition
     * @param queryParameters
     * @throws QueryParameterisationException
     */
    private void checkParameters(CannedQueryDef definition, QueryParameter[] queryParameters)
            throws QueryParameterisationException
    {
        List<QName> missing = new ArrayList<QName>();

        Set<QName> parameterQNameSet = new HashSet<QName>();
        if (queryParameters != null)
        {
            for (QueryParameter parameter : queryParameters)
            {
                parameterQNameSet.add(parameter.getQName());
            }
        }

        for (QueryParameterDefinition parameterDefinition : definition.getQueryParameterDefs())
        {
            if (!parameterDefinition.hasDefaultValue())
            {
                if (!parameterQNameSet.contains(parameterDefinition.getQName()))
                {
                    missing.add(parameterDefinition.getQName());
                }
            }
        }

        if (missing.size() > 0)
        {
            StringBuilder buffer = new StringBuilder(128);
            buffer.append("The query is missing values for the following parameters: ");
            for (QName qName : missing)
            {
                buffer.append(qName);
                buffer.append(", ");
            }
            // strip the trailing ", "
            buffer.delete(buffer.length() - 2, buffer.length());
            throw new QueryParameterisationException(buffer.toString());
        }
    }
    /*
     * Parameterise the query string - not sure if it is required to escape
     * Lucene special chars. The parameters could be used to build the query -
     * the contents of parameters should already have been escaped if required.
     * ... much better to provide the parameters and work out what to do. TODO:
     * conditional query escapement - maybe we should have a parameter type
     * that is not escaped
     */
    private String parameterise(String unparameterised, Map<QName, QueryParameterDefinition> map,
            QueryParameter[] queryParameters, NamespacePrefixResolver nspr) throws QueryParameterisationException
    {
        Map<QName, List<Serializable>> valueMap = new HashMap<QName, List<Serializable>>();

        if (queryParameters != null)
        {
            for (QueryParameter parameter : queryParameters)
            {
                List<Serializable> list = valueMap.get(parameter.getQName());
                if (list == null)
                {
                    list = new ArrayList<Serializable>();
                    valueMap.put(parameter.getQName(), list);
                }
                list.add(parameter.getValue());
            }
        }

        Map<QName, ListIterator<Serializable>> iteratorMap = new HashMap<QName, ListIterator<Serializable>>();

        List<QName> missing = new ArrayList<QName>(1);
        StringBuilder buffer = new StringBuilder(unparameterised);
        int index = 0;
        while ((index = buffer.indexOf("${", index)) != -1)
        {
            int endIndex = buffer.indexOf("}", index);
            String qNameString = buffer.substring(index + 2, endIndex);
            QName key = QName.createQName(qNameString, nspr);
            QueryParameterDefinition parameterDefinition = map.get(key);
            if (parameterDefinition == null)
            {
                missing.add(key);
                buffer.replace(index, endIndex + 1, "");
            }
            else
            {
                ListIterator<Serializable> it = iteratorMap.get(key);
                if ((it == null) || (!it.hasNext()))
                {
                    List<Serializable> list = valueMap.get(key);
                    if ((list != null) && (list.size() > 0))
                    {
                        it = list.listIterator();
                    }
                    if (it != null)
                    {
                        iteratorMap.put(key, it);
                    }
                }
                String value;
                if (it == null)
                {
                    value = parameterDefinition.getDefault();
                }
                else
                {
                    value = DefaultTypeConverter.INSTANCE.convert(String.class, it.next());
                }
                buffer.replace(index, endIndex + 1, value);
            }
        }
        if (missing.size() > 0)
        {
            StringBuilder error = new StringBuilder();
            error.append("The query uses the following parameters which are not defined: ");
            for (QName qName : missing)
            {
                error.append(qName);
                error.append(", ");
            }
            // strip the trailing ", "
            error.delete(error.length() - 2, error.length());
            throw new QueryParameterisationException(error.toString());
        }
        return buffer.toString();
    }
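To make the substitution concrete: given a canned query template such as @cm\:name:"${cm:name}" and a QueryParameter binding cm:name to readme, parameterise rewrites the template to @cm\:name:"readme" by straight string replacement; repeated ${...} tokens consume successive supplied values and fall back to the definition's default once the values are exhausted. The property name and value here are invented for illustration.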
    /**
     * @see org.alfresco.repo.search.impl.NodeSearcher
     */
    public List<NodeRef> selectNodes(NodeRef contextNodeRef, String xpath, QueryParameterDefinition[] parameters,
            NamespacePrefixResolver namespacePrefixResolver, boolean followAllParentLinks, String language)
            throws InvalidNodeRefException, XPathException
    {
        NodeSearcher nodeSearcher = new NodeSearcher(nodeService, dictionaryService, this);
        return nodeSearcher.selectNodes(contextNodeRef, xpath, parameters, namespacePrefixResolver,
                followAllParentLinks, language);
    }

    /**
     * @see org.alfresco.repo.search.impl.NodeSearcher
     */
    public List<Serializable> selectProperties(NodeRef contextNodeRef, String xpath,
            QueryParameterDefinition[] parameters, NamespacePrefixResolver namespacePrefixResolver,
            boolean followAllParentLinks, String language) throws InvalidNodeRefException, XPathException
    {
        NodeSearcher nodeSearcher = new NodeSearcher(nodeService, dictionaryService, this);
        return nodeSearcher.selectProperties(contextNodeRef, xpath, parameters, namespacePrefixResolver,
                followAllParentLinks, language);
    }

    /**
     * @return Returns true if the pattern is present, otherwise false.
     */
    public boolean contains(NodeRef nodeRef, QName propertyQName, String googleLikePattern)
    {
        return contains(nodeRef, propertyQName, googleLikePattern, SearchParameters.Operator.OR);
    }

    /**
     * @return Returns true if the pattern is present, otherwise false.
     */
    public boolean contains(NodeRef nodeRef, QName propertyQName, String googleLikePattern,
            SearchParameters.Operator defaultOperator)
    {
        ResultSet resultSet = null;
        try
        {
            // build Lucene search string specific to the node
            StringBuilder sb = new StringBuilder();
            sb.append("+ID:\"").append(nodeRef.toString()).append("\" +(TEXT:(")
                    .append(googleLikePattern.toLowerCase()).append(") ");
            if (propertyQName != null)
            {
                sb.append(" OR @").append(
                        LuceneQueryParser.escape(QName.createQName(propertyQName.getNamespaceURI(),
                                ISO9075.encode(propertyQName.getLocalName())).toString()));
                sb.append(":(").append(googleLikePattern.toLowerCase()).append(")");
            }
            else
            {
                for (QName key : nodeService.getProperties(nodeRef).keySet())
                {
                    sb.append(" OR @").append(
                            LuceneQueryParser.escape(QName.createQName(key.getNamespaceURI(),
                                    ISO9075.encode(key.getLocalName())).toString()));
                    sb.append(":(").append(googleLikePattern.toLowerCase()).append(")");
                }
            }
            sb.append(")");

            SearchParameters sp = new SearchParameters();
            sp.setLanguage(SearchService.LANGUAGE_LUCENE);
            sp.setQuery(sb.toString());
            sp.setDefaultOperator(defaultOperator);
            sp.addStore(nodeRef.getStoreRef());

            resultSet = this.query(sp);
            boolean answer = resultSet.length() > 0;
            return answer;
        }
        finally
        {
            if (resultSet != null)
            {
                resultSet.close();
            }
        }
    }

    /**
     * @return Returns true if the pattern is present, otherwise false.
     *
     * @see #setIndexer(Indexer)
     * @see #setSearcher(SearchService)
     */
    public boolean like(NodeRef nodeRef, QName propertyQName, String sqlLikePattern, boolean includeFTS)
    {
        if (propertyQName == null)
        {
            throw new IllegalArgumentException("Property QName is mandatory for the like expression");
        }

        StringBuilder sb = new StringBuilder(sqlLikePattern.length() * 3);

        if (includeFTS)
        {
            // convert the SQL-like pattern into a Lucene-compatible string
            String pattern = SearchLanguageConversion.convertXPathLikeToLucene(sqlLikePattern.toLowerCase());

            // build Lucene search string specific to the node
            sb = new StringBuilder();
            sb.append("+ID:\"").append(nodeRef.toString()).append("\" +(");
            // FTS or attribute matches
            if (includeFTS)
            {
                sb.append("TEXT:(").append(pattern).append(") ");
            }
            if (propertyQName != null)
            {
                sb.append(" @").append(
                        LuceneQueryParser.escape(QName.createQName(propertyQName.getNamespaceURI(),
                                ISO9075.encode(propertyQName.getLocalName())).toString())).append(":(").append(pattern)
                        .append(")");
            }
            sb.append(")");

            ResultSet resultSet = null;
            try
            {
                resultSet = this.query(nodeRef.getStoreRef(), "lucene", sb.toString());
                boolean answer = resultSet.length() > 0;
                return answer;
            }
            finally
            {
                if (resultSet != null)
                {
                    resultSet.close();
                }
            }
        }
        else
        {
            // convert the SQL-like pattern into a Lucene-compatible string
            String pattern = SearchLanguageConversion.convertXPathLikeToRegex(sqlLikePattern.toLowerCase());

            Serializable property = nodeService.getProperty(nodeRef, propertyQName);
            if (property == null)
            {
                return false;
            }
            else
            {
                String propertyString = DefaultTypeConverter.INSTANCE.convert(String.class, nodeService.getProperty(
                        nodeRef, propertyQName));
                return propertyString.toLowerCase().matches(pattern);
            }
        }
    }

    public List<NodeRef> selectNodes(NodeRef contextNodeRef, String xpath, QueryParameterDefinition[] parameters,
            NamespacePrefixResolver namespacePrefixResolver, boolean followAllParentLinks)
            throws InvalidNodeRefException, XPathException
    {
        return selectNodes(contextNodeRef, xpath, parameters, namespacePrefixResolver, followAllParentLinks,
                SearchService.LANGUAGE_XPATH);
    }

    public List<Serializable> selectProperties(NodeRef contextNodeRef, String xpath,
            QueryParameterDefinition[] parameters, NamespacePrefixResolver namespacePrefixResolver,
            boolean followAllParentLinks) throws InvalidNodeRefException, XPathException
    {
        return selectProperties(contextNodeRef, xpath, parameters, namespacePrefixResolver, followAllParentLinks,
                SearchService.LANGUAGE_XPATH);
    }
}
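For orientation, a minimal sketch of driving this searcher end to end; the storeRef, config and service variables are assumed to be available from the surrounding context and are not defined in this commit:

    // Illustrative only: query a store via the searcher shown above.
    LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(storeRef, config);
    searcher.setNodeService(nodeService);
    searcher.setDictionaryService(dictionaryService);
    searcher.setNamespacePrefixResolver(namespacePrefixResolver);

    SearchParameters sp = new SearchParameters();
    sp.addStore(storeRef);
    sp.setLanguage(SearchService.LANGUAGE_LUCENE);
    sp.setQuery("TEXT:\"alfresco\"");
    ResultSet results = searcher.query(sp);
    try
    {
        for (ResultSetRow row : results)
        {
            System.out.println(row.getNodeRef());
        }
    }
    finally
    {
        results.close();    // always release the underlying index resources
    }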
@@ -46,6 +46,7 @@ import org.alfresco.repo.search.results.ChildAssocRefResultSet;
import org.alfresco.repo.search.results.DetachedResultSet;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -178,7 +179,7 @@ public class LuceneTest extends TestCase
        serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);

        this.authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponent");
        this.authenticationComponent.setSystemUserAsCurrentUser();

        queryRegisterComponent.loadQueryCollection("testQueryRegister.xml");

@@ -187,7 +188,8 @@ public class LuceneTest extends TestCase

        testTX = transactionService.getUserTransaction();
        testTX.begin();

        this.authenticationComponent.setSystemUserAsCurrentUser();

        // load in the test model
        ClassLoader cl = BaseNodeServiceTest.class.getClassLoader();
        InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/lucene/LuceneTest_model.xml");
@@ -323,7 +325,7 @@ public class LuceneTest extends TestCase
        {
            testTX.rollback();
        }
        authenticationComponent.clearCurrentSecurityContext();
        AuthenticationUtil.clearCurrentSecurityContext();
        super.tearDown();
    }

@@ -391,7 +393,7 @@ public class LuceneTest extends TestCase

    public void testMTDeleteIssue() throws Exception
    {

        luceneFTS.pause();
        testTX.commit();

        UserTransaction tx = transactionService.getUserTransaction();
@@ -505,6 +507,7 @@ public class LuceneTest extends TestCase

    public void testDeltaIssue() throws Exception
    {
        luceneFTS.pause();
        final NodeService pns = (NodeService) ctx.getBean("NodeService");

        testTX.commit();
@@ -2075,6 +2078,38 @@ public class LuceneTest extends TestCase
        assertEquals(1, results.length());
        results.close();
    }

    public void testNumericInPath() throws Exception
    {
        String COMPLEX_LOCAL_NAME = "Woof12";

        luceneFTS.pause();
        buildBaseIndex();
        runBaseTests();

        LuceneIndexerImpl indexer = LuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
                + System.currentTimeMillis(), indexerAndSearcher);
        indexer.setNodeService(nodeService);
        indexer.setLuceneIndexLock(luceneIndexLock);
        indexer.setDictionaryService(dictionaryService);
        indexer.setLuceneFullTextSearchIndexer(luceneFTS);
        indexer.setContentService(contentService);

        ChildAssociationRef car = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName
                .createQName("{namespace}" + COMPLEX_LOCAL_NAME), testSuperType);
        indexer.createNode(car);

        indexer.commit();

        LuceneSearcherImpl searcher = LuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
        searcher.setNodeService(nodeService);
        searcher.setDictionaryService(dictionaryService);
        searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver("namespace"));
        ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/namespace:"
                + ISO9075.encode(COMPLEX_LOCAL_NAME) + "\"", null, null);
        assertEquals(1, results.length());
        results.close();
    }

    public void testDeleteContainer() throws Exception
    {
3197 source/java/org/alfresco/repo/search/impl/lucene/LuceneTest2.java (Normal file)
File diff suppressed because it is too large
@@ -49,11 +49,18 @@ public class DoubleTokenFilter extends Tokenizer
        Token candidate;
        while ((candidate = baseTokeniser.next()) != null)
        {
            Double d = Double.valueOf(candidate.termText());
            String valueString = NumericEncoder.encode(d.doubleValue());
            Token doubleToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                    candidate.type());
            return doubleToken;
            try
            {
                Double d = Double.valueOf(candidate.termText());
                String valueString = NumericEncoder.encode(d.doubleValue());
                Token doubleToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                        candidate.type());
                return doubleToken;
            }
            catch (NumberFormatException e)
            {
                // just ignore and try the next one
            }
        }
        return null;
    }
@@ -49,11 +49,18 @@ public class FloatTokenFilter extends Tokenizer
        Token candidate;
        while ((candidate = baseTokeniser.next()) != null)
        {
            Float floatValue = Float.valueOf(candidate.termText());
            String valueString = NumericEncoder.encode(floatValue.floatValue());
            Token floatToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                    candidate.type());
            return floatToken;
            try
            {
                Float floatValue = Float.valueOf(candidate.termText());
                String valueString = NumericEncoder.encode(floatValue.floatValue());
                Token floatToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                        candidate.type());
                return floatToken;
            }
            catch (NumberFormatException e)
            {
                // just ignore and try the next one
            }
        }
        return null;
    }
@@ -49,11 +49,18 @@ public class IntegerTokenFilter extends Tokenizer
        Token candidate;
        while ((candidate = baseTokeniser.next()) != null)
        {
            Integer integer = Integer.valueOf(candidate.termText());
            String valueString = NumericEncoder.encode(integer.intValue());
            Token integerToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                    candidate.type());
            return integerToken;
            try
            {
                Integer integer = Integer.valueOf(candidate.termText());
                String valueString = NumericEncoder.encode(integer.intValue());
                Token integerToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                        candidate.type());
                return integerToken;
            }
            catch (NumberFormatException e)
            {
                // just ignore and try the next one
            }
        }
        return null;
    }
@@ -19,6 +19,7 @@ package org.alfresco.repo.search.impl.lucene.analysis;
import java.io.IOException;
import java.io.Reader;

import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
@@ -49,11 +50,18 @@ public class LongTokenFilter extends Tokenizer
        Token candidate;
        while ((candidate = baseTokeniser.next()) != null)
        {
            Long longValue = Long.valueOf(candidate.termText());
            String valueString = NumericEncoder.encode(longValue.longValue());
            Token longToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                    candidate.type());
            return longToken;
            try
            {
                Long longValue = Long.valueOf(candidate.termText());
                String valueString = NumericEncoder.encode(longValue.longValue());
                Token longToken = new Token(valueString, candidate.startOffset(), candidate.startOffset(),
                        candidate.type());
                return longToken;
            }
            catch (NumberFormatException e)
            {
                // just ignore and try the next one
            }
        }
        return null;
    }
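All four token filters get the same fix: a token that fails numeric parsing is now skipped instead of letting the NumberFormatException escape from next(). A minimal sketch of the behavioural difference, with token values invented for illustration:

    // Illustrative only: "abc" is now skipped rather than aborting tokenisation.
    // Before this change, Double.valueOf("abc") would propagate out of next().
    String[] tokens = { "3.14", "abc", "42" };
    for (String t : tokens)
    {
        try
        {
            System.out.println(NumericEncoder.encode(Double.valueOf(t).doubleValue()));
        }
        catch (NumberFormatException e)
        {
            // just ignore and try the next one - mirrors the filters' new behaviour
        }
    }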
@@ -20,7 +20,10 @@ import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.LuceneIndexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory;
import org.alfresco.service.cmr.repository.StoreRef;
import org.springframework.context.ApplicationContext;
@@ -36,7 +39,7 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc

    private static Set<StoreRef> indexing = new HashSet<StoreRef>();

    LuceneIndexerAndSearcherFactory luceneIndexerAndSearcherFactory;
    LuceneIndexerAndSearcher luceneIndexerAndSearcherFactory;

    private int pauseCount = 0;

@@ -161,7 +164,7 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
        if (toIndex != null)
        {
            //System.out.println("Indexing "+toIndex+" at "+(new java.util.Date()));
            LuceneIndexer indexer = luceneIndexerAndSearcherFactory.getIndexer(toIndex);
            IndexerSPI indexer = luceneIndexerAndSearcherFactory.getIndexer(toIndex);
            indexer.registerCallBack(this);
            indexer.updateFullTextSearch(1000);
        }
@@ -198,7 +201,7 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
        return nextStoreRef;
    }

    public void setLuceneIndexerAndSearcherFactory(LuceneIndexerAndSearcherFactory luceneIndexerAndSearcherFactory)
    public void setLuceneIndexerAndSearcherFactory(LuceneIndexerAndSearcher luceneIndexerAndSearcherFactory)
    {
        this.luceneIndexerAndSearcherFactory = luceneIndexerAndSearcherFactory;
    }
@@ -0,0 +1,161 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene.index;

/**
 * Describes an entry in an index
 *
 * @author Andy Hind
 */
class IndexEntry
{
    /**
     * The type of the index entry
     */
    private IndexType type;

    /**
     * The unique name of the index entry
     */
    private String name;
    /**
     * The preceding index name.
     * Allows deltas etc. to apply to the index or an overlay, for example.
     */
    private String parentName;
    /**
     * The status of the index entry
     */
    private TransactionStatus status;

    /**
     * If merging, the id where the result is going
     */
    private String mergeId;

    private long documentCount;

    private long deletions;

    private boolean deletOnlyNodes;

    IndexEntry(IndexType type, String name, String parentName, TransactionStatus status, String mergeId, long documentCount, long deletions, boolean deletOnlyNodes)
    {
        this.type = type;
        this.name = name;
        this.parentName = parentName;
        this.status = status;
        this.mergeId = mergeId;
        this.documentCount = documentCount;
        this.deletions = deletions;
        this.deletOnlyNodes = deletOnlyNodes;
    }

    public String getMergeId()
    {
        return mergeId;
    }

    public void setMergeId(String mergeId)
    {
        this.mergeId = mergeId;
    }

    public String getName()
    {
        return name;
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public String getParentName()
    {
        return parentName;
    }

    public void setParentName(String parentName)
    {
        this.parentName = parentName;
    }

    public TransactionStatus getStatus()
    {
        return status;
    }

    public void setStatus(TransactionStatus status)
    {
        this.status = status;
    }

    public IndexType getType()
    {
        return type;
    }

    public void setType(IndexType type)
    {
        this.type = type;
    }

    public long getDocumentCount()
    {
        return documentCount;
    }

    public void setDocumentCount(long documentCount)
    {
        this.documentCount = documentCount;
    }

    public long getDeletions()
    {
        return deletions;
    }

    public void setDeletions(long deletions)
    {
        this.deletions = deletions;
    }

    public boolean isDeletOnlyNodes()
    {
        return deletOnlyNodes;
    }

    public void setDeletOnlyNodes(boolean deletOnlyNodes)
    {
        this.deletOnlyNodes = deletOnlyNodes;
    }

    public String toString()
    {
        StringBuilder builder = new StringBuilder();
        builder.append(" Name=").append(getName()).append(" ");
        builder.append("Type=").append(getType()).append(" ");
        builder.append("Status=").append(getStatus()).append(" ");
        builder.append("Docs=").append(getDocumentCount()).append(" ");
        return builder.toString();
    }

}
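For orientation, a sketch of how an entry might be created while a delta is being written; the constructor is package-private, so this only makes sense inside the index package, and IndexType.DELTA is an assumed constant (the IndexType values are not shown in this commit):

    // Illustrative only: a delta that wrote 10 documents and deleted 3 nodes,
    // currently active and not yet merged anywhere.
    IndexEntry entry = new IndexEntry(
            IndexType.DELTA,                 // assumed enum constant
            GUID.generate(),                 // unique entry name
            "",                              // parent index name
            TransactionStatus.ACTIVE,        // status, as used in IndexInfoTest below
            "",                              // no merge target yet
            10, 3, false);
    System.out.println(entry);               // uses the toString() shown above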
File diff suppressed because it is too large
@@ -0,0 +1,871 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 * http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene.index;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;

import junit.framework.TestCase;

public class IndexInfoTest extends TestCase
{
    public static final String[] WORD_LIST = { "aardvark", "banana", "cucumber", "daffodil", "emu", "frog", "gibbon",
            "humour", "injection", "jelly", "key", "lemur", "monkey", "number", "open", "plummet", "quest",
            "replication", "steam", "tunnel", "uncommon", "verbose", "where", "xylem", "yellow", "zebra", "alpha",
            "bravo", "charlie", "delta", "echo", "foxtrot", "golf", "hotel", "indigo", "juliet", "kilo", "lima",
            "mike", "november", "oscar", "papa", "quebec", "romeo", "sierra", "tango", "uniform", "victor", "whisky",
            "xray", "yankee", "zulu" };

    public static final String[] CREATE_LIST = { "aardvark", "banana", "cucumber", "daffodil", "emu", "frog", "gibbon",
            "humour", "injection", "jelly", "key", "lemur", "monkey", "number", "open", "plummet", "quest",
            "replication", "steam", "tunnel", "uncommon", "verbose", "where", "xylem", "yellow", "zebra", };

    public static final String[] UPDATE_LIST = { "alpha", "bravo", "charlie", "delta", "echo", "foxtrot", "golf",
            "hotel", "indigo", "juliet", "kilo", "lima", "mike", "november", "oscar", "papa", "quebec", "romeo",
            "sierra", "tango", "uniform", "victor", "whisky", "xray", "yankee", "zulu" };

    public static final String[] CREATE_LIST_2 = { "aardvark2", "banana2", "cucumber2", "daffodil2", "emu2", "frog2", "gibbon2",
            "humour2", "injection2", "jelly2", "key2", "lemur2", "monkey2", "number2", "open2", "plummet2", "quest2",
            "replication2", "steam2", "tunnel2", "uncommon2", "verbose2", "where2", "xylem2", "yellow2", "zebra2", };

    public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "delta2", "echo2", "foxtrot2", "golf2",
            "hotel2", "indigo2", "juliet2", "kilo2", "lima2", "mike2", "november2", "oscar2", "papa2", "quebec2", "romeo2",
            "sierra2", "tango2", "uniform2", "victor2", "whisky2", "xray2", "yankee2", "zulu2" };

    public IndexInfoTest()
    {
        super();
    }

    public IndexInfoTest(String arg0)
    {
        super(arg0);
    }

    public void testCreateAndSearch() throws IOException
    {
        System.setProperty("disableLuceneLocks", "true");

        // no deletions - create only
        HashSet<NodeRef> deletions = new HashSet<NodeRef>();
        for (int i = 0; i < 0; i++)
        {
            deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()));
        }

        File tempLocation = TempFileProvider.getTempDir();
        File testArea = new File(tempLocation, "IndexInfoTest");
        File testDir = new File(testArea, "" + System.currentTimeMillis());
        final IndexInfo ii = IndexInfo.getIndexInfo(testDir);

        for (int i = 0; i < WORD_LIST.length; i++)
        {
            IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            reader.close();

            String guid = GUID.generate();
            ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
            IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

            Document doc = new Document();
            for (int k = 0; k < 15; k++)
            {
                doc.add(new Field("ID" + k, guid, false, true, false));
            }
            doc.add(new Field("TEXT", WORD_LIST[i], false, true, false));
            writer.addDocument(doc);

            ii.closeDeltaIndexWriter(guid);
            ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
            ii.setPreparedState(guid, deletions, 1, false);
            ii.getDeletions(guid);
            ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            for (int j = 0; j < WORD_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
                if (j < i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions, false);
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < WORD_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
            ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < WORD_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();
        }
    }

    public void testCreateDeleteAndSearch() throws IOException
    {
        assertEquals(CREATE_LIST.length, UPDATE_LIST.length);

        StoreRef storeRef = new StoreRef("woof", "bingle");

        System.setProperty("disableLuceneLocks", "true");

        // no deletions - create only
        ArrayList<NodeRef> nodeRefs = new ArrayList<NodeRef>();

        File tempLocation = TempFileProvider.getTempDir();
        File testArea = new File(tempLocation, "IndexInfoTest");
        File testDir = new File(testArea, "" + System.currentTimeMillis());
        final IndexInfo ii = IndexInfo.getIndexInfo(testDir);

        for (int i = 0; i < CREATE_LIST.length; i++)
        {
            IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            reader.close();

            String guid = GUID.generate();
            ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
            IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

            Document doc = new Document();
            for (int k = 0; k < 15; k++)
            {
                doc.add(new Field("ID" + k, guid, false, true, false));
            }
            doc.add(new Field("TEXT", CREATE_LIST[i], false, true, false));
            NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
            nodeRefs.add(nodeRef);
            doc.add(new Field("ID", nodeRef.toString(), false, true, false));
            writer.addDocument(doc);

            ii.closeDeltaIndexWriter(guid);
            ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
            ii.setPreparedState(guid, new HashSet<NodeRef>(), 1, false);
            ii.getDeletions(guid);
            ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j < i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, new HashSet<NodeRef>(), false);
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
            ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();
        }

        for (int i = 0; i < CREATE_LIST.length; i++)
        {
            HashSet<NodeRef> deletions = new HashSet<NodeRef>();
            deletions.add(nodeRefs.get(i));

            IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), CREATE_LIST.length - i);
            reader.close();

            String guid = GUID.generate();
            ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
            ii.closeDeltaIndexWriter(guid);
            ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
            ii.setPreparedState(guid, deletions, 1, false);
            ii.getDeletions(guid);
            ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), CREATE_LIST.length - i);
            int lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j >= i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions, false);
            assertEquals(reader.numDocs(), UPDATE_LIST.length - i - 1);
            lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j > i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
            ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), UPDATE_LIST.length - i - 1);
            lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j > i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            IndexReader reader1 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            IndexReader reader2 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            IndexReader reader3 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            reader3.close();
            reader2.close();
            reader1.close();
        }
    }

    public void testCreateUpdateAndSearch() throws IOException
    {
        assertEquals(CREATE_LIST.length, UPDATE_LIST.length);

        StoreRef storeRef = new StoreRef("woof", "bingle");

        System.setProperty("disableLuceneLocks", "true");

        // no deletions - create only
        ArrayList<NodeRef> nodeRefs = new ArrayList<NodeRef>();

        File tempLocation = TempFileProvider.getTempDir();
        File testArea = new File(tempLocation, "IndexInfoTest");
        File testDir = new File(testArea, "" + System.currentTimeMillis());
        final IndexInfo ii = IndexInfo.getIndexInfo(testDir);

        for (int i = 0; i < CREATE_LIST.length; i++)
        {
            IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            reader.close();

            String guid = GUID.generate();
            ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
            IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

            Document doc = new Document();
            for (int k = 0; k < 15; k++)
            {
                doc.add(new Field("ID" + k, guid, false, true, false));
            }
            doc.add(new Field("TEXT", CREATE_LIST[i], false, true, false));
            NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
            nodeRefs.add(nodeRef);
            doc.add(new Field("ID", nodeRef.toString(), false, true, false));
            writer.addDocument(doc);

            ii.closeDeltaIndexWriter(guid);
            ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
            ii.setPreparedState(guid, new HashSet<NodeRef>(), 1, false);
            ii.getDeletions(guid);
            ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j < i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, new HashSet<NodeRef>(), false);
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
            ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), i + 1);
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertEquals(tds.doc(), j);
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();
        }

        for (int i = 0; i < UPDATE_LIST.length; i++)
        {
            HashSet<NodeRef> deletions = new HashSet<NodeRef>();
            deletions.add(nodeRefs.get(i));

            IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), UPDATE_LIST.length);
            reader.close();

            String guid = GUID.generate();
            ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
            IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

            Document doc = new Document();
            for (int k = 0; k < 15; k++)
            {
                doc.add(new Field("ID" + k, guid, false, true, false));
            }
            doc.add(new Field("TEXT", UPDATE_LIST[i], false, true, false));
            writer.addDocument(doc);

            ii.closeDeltaIndexWriter(guid);
            ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
            ii.setPreparedState(guid, deletions, 1, false);
            ii.getDeletions(guid);
            ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), UPDATE_LIST.length);
            int lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j >= i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            for (int j = 0; j < UPDATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
                if (j < i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions, false);
            assertEquals(reader.numDocs(), UPDATE_LIST.length);
            lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j > i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            for (int j = 0; j < UPDATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();

            ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
            ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

            reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
            assertEquals(reader.numDocs(), UPDATE_LIST.length);
            lastDoc = -1;
            for (int j = 0; j < CREATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
                if (j > i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            for (int j = 0; j < UPDATE_LIST.length; j++)
            {
                TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
                if (j <= i)
                {
                    assertTrue(tds.next());
                    assertTrue(tds.doc() > lastDoc);
                    lastDoc = tds.doc();
                }
                else
                {
                    assertFalse(tds.next());
                }
            }
            reader.close();
        }
    }

    public void testMultiThreadedCreateAndSearch()
    {
        System.setProperty("disableLuceneLocks", "true");

        File tempLocation = TempFileProvider.getTempDir();
        File testArea = new File(tempLocation, "IndexInfoTest");
        File testDir = new File(testArea, "" + System.currentTimeMillis());
        final IndexInfo ii = IndexInfo.getIndexInfo(testDir);

        Thread thread1 = new Thread(new Test(ii, CREATE_LIST, UPDATE_LIST));
        Thread thread2 = new Thread(new Test(ii, CREATE_LIST_2, UPDATE_LIST_2));
        thread1.start();
        thread2.start();
        try
        {
            thread1.join();
            thread2.join();
        }
        catch (InterruptedException e)
        {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    public static class Test implements Runnable
    {
        String[] create;
        String[] update;
        IndexInfo ii;

        Test(IndexInfo ii, String[] create, String[] update)
        {
            this.ii = ii;
            this.create = create;
            this.update = update;
        }

        public void run()
        {
            try
            {
                assertEquals(create.length, update.length);

                StoreRef storeRef = new StoreRef("woof", "bingle");

                // no deletions - create only
                ArrayList<NodeRef> nodeRefs = new ArrayList<NodeRef>();

                for (int i = 0; i < create.length; i++)
                {
                    IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
                    reader.close();

                    String guid = GUID.generate();
                    ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
                    IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

                    Document doc = new Document();
                    for (int k = 0; k < 15; k++)
                    {
                        doc.add(new Field("ID" + k, guid, false, true, false));
                    }
                    doc.add(new Field("TEXT", create[i], false, true, false));
                    NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
                    nodeRefs.add(nodeRef);
                    doc.add(new Field("ID", nodeRef.toString(), false, true, false));
                    writer.addDocument(doc);

                    ii.closeDeltaIndexWriter(guid);
                    ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
                    ii.setPreparedState(guid, new HashSet<NodeRef>(), 1, false);
                    ii.getDeletions(guid);
                    ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();

                    int lastDoc = -1;

                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j < i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    reader.close();

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, new HashSet<NodeRef>(), false);
                    lastDoc = -1;
                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j <= i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    reader.close();

                    ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
                    ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
                    lastDoc = -1;
                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j <= i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    reader.close();
                }

                for (int i = 0; i < update.length; i++)
                {
                    HashSet<NodeRef> deletions = new HashSet<NodeRef>();
                    deletions.add(nodeRefs.get(i));

                    IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
                    reader.close();

                    String guid = GUID.generate();
                    ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
                    IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());

                    Document doc = new Document();
                    for (int k = 0; k < 15; k++)
                    {
                        doc.add(new Field("ID" + k, guid, false, true, false));
                    }
                    doc.add(new Field("TEXT", update[i], false, true, false));
                    writer.addDocument(doc);

                    ii.closeDeltaIndexWriter(guid);
                    ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
                    ii.setPreparedState(guid, deletions, 1, false);
                    ii.getDeletions(guid);
                    ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();

                    int lastDoc = -1;
                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j >= i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    for (int j = 0; j < update.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", update[j]));
                        if (j < i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    reader.close();

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions, false);

                    lastDoc = -1;
                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j > i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    for (int j = 0; j < update.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", update[j]));
                        if (j <= i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }

                    reader.close();

                    ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
                    ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);

                    reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();

                    lastDoc = -1;
                    for (int j = 0; j < create.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", create[j]));
                        if (j > i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    for (int j = 0; j < update.length; j++)
                    {
                        TermDocs tds = reader.termDocs(new Term("TEXT", update[j]));
                        if (j <= i)
                        {
                            assertTrue(tds.next());
                            assertTrue(tds.doc() > lastDoc);
                            lastDoc = tds.doc();
                        }
                        else
                        {
                            assertFalse(tds.next());
                        }
                    }
                    reader.close();
                }
            }
            catch (IOException e)
            {
                System.exit(-1);
            }
        }
    }
}

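The tests above repeat the same two-phase commit dance against IndexInfo for every document. As a condensed sketch, extracted from the test bodies purely for illustration (the helper name commitOneDocument is hypothetical, not part of the commit):

    static void commitOneDocument(IndexInfo ii, Document doc, HashSet<NodeRef> deletions) throws IOException
    {
        // Register a new transaction (identified by a GUID) and write the delta.
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());
        writer.addDocument(doc);
        ii.closeDeltaIndexWriter(guid);

        // Prepare: record the deletions and document count for the delta.
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, deletions, 1, false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);

        // Commit: once COMMITTED, the delta is visible to the main index reader.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
    }
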
@@ -0,0 +1,30 @@
package org.alfresco.repo.search.impl.lucene.index;

/**
 * The type of an entry in this index.
 *
 * @author Andy Hind
 */
public enum IndexType
{
    /**
     * Identifies the main index. This is always a fully optimised index.
     */
    INDEX,

    /**
     * An overlay. This is an optimised index with a deletion list. To commit an overlay requires no deletions
     * against other indexes. Deletions are done when an overlay turns into, or is merged into, an index.
     * Overlays are periodically merged into an index. An overlay can require or have background properties indexed.
     */
    DELTA,

    /**
     * A long running overlay definition against the index. Not yet supported.
     * This, itself, may have transactional additions.
     */
    OVERLAY,

    OVERLAY_DELTA;
}

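As a small, hypothetical illustration of branching on these entry types (the describe helper below is not part of the commit; it just restates the javadoc above in code):

    static String describe(IndexType type)
    {
        switch (type)
        {
        case INDEX:
            return "fully optimised main index";
        case DELTA:
            return "optimised overlay with a deletion list";
        case OVERLAY:
        case OVERLAY_DELTA:
            return "long running overlay - not yet supported";
        default:
            return "unknown entry type";
        }
    }
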
@@ -0,0 +1,30 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 * http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene.index;

import java.io.IOException;

public interface ReferenceCounting
{
    public void incrementReferenceCount();

    public void decrementReferenceCount() throws IOException;

    public int getReferenceCount();

    public void setInvalidForReuse() throws IOException;
}

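A minimal sketch of the acquire/release discipline this contract implies (hypothetical caller code, assuming a reader implementing ReferenceCounting has been obtained elsewhere):

    void useReader(ReferenceCounting counted) throws IOException
    {
        counted.incrementReferenceCount();
        try
        {
            // ... search against the underlying IndexReader here ...
        }
        finally
        {
            // An implementation may close the underlying reader once the
            // count reaches zero and the reader is marked invalid for reuse.
            counted.decrementReferenceCount();
        }
    }
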
@@ -0,0 +1,122 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 * http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.search.impl.lucene.index;

import java.io.IOException;

import org.apache.log4j.Logger;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;

public class ReferenceCountingReadOnlyIndexReaderFactory
{
    public static IndexReader createReader(String id, IndexReader indexReader)
    {
        return new ReferenceCountingReadOnlyIndexReader(id, indexReader);
    }

    public static class ReferenceCountingReadOnlyIndexReader extends FilterIndexReader implements ReferenceCounting
    {
        private static Logger s_logger = Logger.getLogger(ReferenceCountingReadOnlyIndexReader.class);

        private static final long serialVersionUID = 7693185658022810428L;

        String id;

        int refCount = 0;

        boolean invalidForReuse = false;

        ReferenceCountingReadOnlyIndexReader(String id, IndexReader indexReader)
        {
            super(indexReader);
            this.id = id;
        }

        public synchronized void incrementReferenceCount()
        {
            refCount++;
            if (s_logger.isDebugEnabled())
            {
                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " - increment - ref count is " + refCount);
            }
        }

        public synchronized void decrementReferenceCount() throws IOException
        {
            refCount--;
            if (s_logger.isDebugEnabled())
            {
                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " - decrement - ref count is " + refCount);
            }
            closeIfRequired();
        }

        private void closeIfRequired() throws IOException
        {
            if ((refCount == 0) && invalidForReuse)
            {
                if (s_logger.isDebugEnabled())
                {
                    s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " closed.");
                }
                in.close();
            }
            else
            {
                if (s_logger.isDebugEnabled())
                {
                    s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " still open .... ref = " + refCount + " invalidForReuse = " + invalidForReuse);
                }
            }
        }

        public synchronized int getReferenceCount()
        {
            return refCount;
        }

        public synchronized void setInvalidForReuse() throws IOException
        {
            invalidForReuse = true;
            if (s_logger.isDebugEnabled())
            {
                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " set invalid for reuse");
            }
            closeIfRequired();
        }

        @Override
        protected void doClose() throws IOException
        {
            if (s_logger.isDebugEnabled())
            {
                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " closing");
            }
            decrementReferenceCount();
        }

        @Override
        protected void doDelete(int n) throws IOException
        {
            throw new UnsupportedOperationException("Delete is not supported by read only index readers");
        }
    }
}

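How the factory might be used, as a sketch only (the directory variable and the "main" id are illustrative assumptions, not taken from the commit):

    // Wrap a plain Lucene reader so the reference counting above applies.
    IndexReader raw = IndexReader.open(directory);
    IndexReader counted = ReferenceCountingReadOnlyIndexReaderFactory.createReader("main", raw);

    // Callers bump the count while they hold the reader.
    ((ReferenceCounting) counted).incrementReferenceCount();

    // When a newer index generation replaces this one, mark it invalid;
    // the underlying reader is closed only when the last reference is released.
    ((ReferenceCounting) counted).setInvalidForReuse();
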
@@ -0,0 +1,488 @@
package org.alfresco.repo.search.impl.lucene.index;

/**
 * Status of indexes that make up the whole index. This starts with the values from javax.transaction.Status.
 *
 * Lifecycle
 * ---------
 *
 * As a transaction starts, the delta is ACTIVE. It may go MARKED_ROLLBACK -> ROLLEDBACK, or
 * PREPARING -> PREPARED -> COMMITTING -> COMMITTED ... with rollback possible at any time.
 *
 * If the index has any delayed indexing it commits to COMMITTED_REQUIRES_REINDEX and then the overlay can go from
 * COMMITTED_REINDEXING -> COMMITTED_REINDEXED.
 *
 * If there was no reindexing required the delta commits as COMMITTED.
 *
 * A delta changes to an index overlay as it is committed.
 *
 * An overlay in COMMITTED or COMMITTED_REINDEXED can have its delete list applied to sub indexes. At this point it
 * becomes a sub index.
 *
 * @author Andy Hind
 */
public enum TransactionStatus
{
    // Match the order in javax.transaction.Status so ordinal values are correct
    ACTIVE
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == null;
        }
    },

    MARKED_ROLLBACK
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous.allowsRollbackOrMark(previous);
        }
    },

    PREPARED
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == TransactionStatus.PREPARING;
        }
    },

    COMMITTED
    {
        public boolean isCommitted()
        {
            return true;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == TransactionStatus.COMMITTING;
        }
    },

    ROLLEDBACK
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == TransactionStatus.ROLLINGBACK;
        }
    },

    UNKNOWN
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return false;
        }
    },

    NO_TRANSACTION
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return false;
        }
    },

    PREPARING
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == TransactionStatus.ACTIVE;
        }
    },

    COMMITTING
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous == TransactionStatus.PREPARED;
        }
    },

    ROLLINGBACK
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return true;
        }

        public boolean canBeReordered()
        {
            return true;
        }

        public boolean follows(TransactionStatus previous)
        {
            return previous.allowsRollbackOrMark(previous);
        }
    },

    /*
     * This entry is the source for an active merge. The result will be in a new index.
     */
    MERGE
    {
        public boolean isCommitted()
        {
            return true;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return false;
        }
    },

    /*
     * A new index element that is being made by a merge.
     */
    MERGE_TARGET
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return false;
        }
    },

    /*
     * These index overlays require reindexing
     */
    // COMMITTED_REQUIRES_REINDEX
    // {
    //     public boolean isCommitted()
    //     {
    //         return true;
    //     }
    //
    //     public boolean isTransient()
    //     {
    //         return false;
    //     }
    //
    //     public boolean canBeReordered()
    //     {
    //         return false;
    //     }
    //
    //     public boolean follows(TransactionStatus previous)
    //     {
    //         return false;
    //     }
    // },

    /*
     * These index overlays are reindexing
     */
    // COMMITTED_REINDEXING
    // {
    //     public boolean isCommitted()
    //     {
    //         return true;
    //     }
    //
    //     public boolean canBeReordered()
    //     {
    //         return false;
    //     }
    //
    //     public boolean isTransient()
    //     {
    //         return false;
    //     }
    //
    //     public boolean follows(TransactionStatus previous)
    //     {
    //         return false;
    //     }
    // },

    /*
     * These index overlays have been reindexed.
     */
    // COMMITTED_REINDEXED
    // {
    //     public boolean isCommitted()
    //     {
    //         return true;
    //     }
    //
    //     public boolean isTransient()
    //     {
    //         return false;
    //     }
    //
    //     public boolean canBeReordered()
    //     {
    //         return false;
    //     }
    //
    //     public boolean follows(TransactionStatus previous)
    //     {
    //         return false;
    //     }
    // },

    /*
     * Committed but the index still has deletions
     */
    // COMMITTED_WITH_DELETIONS
    // {
    //     public boolean isCommitted()
    //     {
    //         return true;
    //     }
    //
    //     public boolean isTransient()
    //     {
    //         return false;
    //     }
    //
    //     public boolean canBeReordered()
    //     {
    //         return false;
    //     }
    //
    //     public boolean follows(TransactionStatus previous)
    //     {
    //         return false;
    //     }
    // },

    /*
     * Pending deletes for the delta are being committed.
     */
    COMMITTED_DELETING
    {
        public boolean isCommitted()
        {
            return true;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return false;
        }
    },

    /*
     * An entry that may be deleted
     */
    DELETABLE
    {
        public boolean isCommitted()
        {
            return false;
        }

        public boolean isTransient()
        {
            return false;
        }

        public boolean canBeReordered()
        {
            return false;
        }

        public boolean follows(TransactionStatus previous)
        {
            return true;
        }
    };

    public abstract boolean isCommitted();

    public abstract boolean isTransient();

    public abstract boolean canBeReordered();

    public abstract boolean follows(TransactionStatus previous);

    private boolean allowsRollbackOrMark(TransactionStatus previous)
    {
        switch (previous)
        {
        case ACTIVE:
        case MARKED_ROLLBACK:
        case PREPARED:
        case PREPARING:
        case COMMITTING:
            return true;
        default:
            return false;
        }
    }
}

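The follows(previous) hooks encode the legal orderings described in the javadoc above. A small hypothetical check of the happy path (illustration only, not part of the commit):

    TransactionStatus[] happyPath = { TransactionStatus.ACTIVE, TransactionStatus.PREPARING,
            TransactionStatus.PREPARED, TransactionStatus.COMMITTING, TransactionStatus.COMMITTED };

    TransactionStatus previous = null;
    for (TransactionStatus next : happyPath)
    {
        // Each step is legal only if next.follows(previous) is true;
        // ACTIVE is the only state that may follow the initial null.
        assert next.follows(previous);
        previous = next;
    }
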