Andrew Hind
2006-12-04 11:04:34 +00:00
parent 2845364a18
commit dcc831b2a3
19 changed files with 173 additions and 8980 deletions


@@ -130,7 +130,8 @@ public class ConcurrentNodeServiceSearchTest extends TestCase
public void testConcurrent() throws Exception
{
luceneFTS.pause();
IndexWriter.COMMIT_LOCK_TIMEOUT = 100000;
// TODO: LUCENE UPDATE ISSUE fix commit lock timeout here
// IndexWriter.COMMIT_LOCK_TIMEOUT = 100000;
int count = 10;
int repeats = 10;


@@ -126,7 +126,8 @@ public class ConcurrentNodeServiceTest extends TestCase
public void testConcurrent() throws Exception
{
luceneFTS.pause();
IndexWriter.COMMIT_LOCK_TIMEOUT = 100000;
// TODO: LUCENE UPDATE ISSUE fix commit lock time out
// IndexWriter.COMMIT_LOCK_TIMEOUT = 100000;
int count = 10;
int repeats = 10;
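After the Lucene update the tests can no longer assign IndexWriter.COMMIT_LOCK_TIMEOUT directly, so the assignment is commented out behind a TODO in both hunks above. A minimal sketch of one way the timeout could be set once the TODO is resolved, going through the setCommitLockTimeout property added to LuceneIndexerAndSearcherFactory2 later in this commit; the bean lookup and cast are assumptions for illustration, not part of this change:

import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2;
import org.springframework.context.ApplicationContext;

class CommitLockTimeoutSketch
{
    // Hypothetical helper: routes the timeout through the factory's new property
    // instead of the removed IndexWriter.COMMIT_LOCK_TIMEOUT static field.
    static void raiseCommitLockTimeout(ApplicationContext ctx, long millis)
    {
        // Bean id assumed from the "luceneIndexerAndSearcherFactory" lookup used
        // elsewhere in this code base; it may differ in the actual context files.
        LuceneIndexerAndSearcherFactory2 factory =
                (LuceneIndexerAndSearcherFactory2) ctx.getBean("luceneIndexerAndSearcherFactory");
        factory.setCommitLockTimeout(millis);
    }
}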


@@ -18,7 +18,7 @@ package org.alfresco.repo.node.index;
import java.util.List;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl2;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -92,7 +92,7 @@ public class MissingContentReindexComponent extends AbstractReindexComponent
// search for it in the index, sorting with youngest first
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.setQuery("TEXT:" + LuceneIndexerImpl2.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try


@@ -23,7 +23,7 @@ import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl2;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
@@ -126,7 +126,7 @@ public class MissingContentReindexComponentTest extends TestCase
SearchParameters sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.setQuery("TEXT:" + LuceneIndexerImpl2.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try


@@ -1,672 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.search.impl.lucene;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Random;
import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.dictionary.DictionaryDAO;
import org.alfresco.repo.dictionary.M2Aspect;
import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.dictionary.M2Property;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.DynamicNamespacePrefixResolver;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
public class LuceneCategoryTest extends TestCase
{
private ServiceRegistry serviceRegistry;
static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
NodeService nodeService;
DictionaryService dictionaryService;
LuceneIndexLock luceneIndexLock;
private NodeRef rootNodeRef;
private NodeRef n1;
private NodeRef n2;
private NodeRef n3;
private NodeRef n4;
private NodeRef n6;
private NodeRef n5;
private NodeRef n7;
private NodeRef n8;
private NodeRef n9;
private NodeRef n10;
private NodeRef n11;
private NodeRef n12;
private NodeRef n13;
private NodeRef n14;
private NodeRef catContainer;
private NodeRef catRoot;
private NodeRef catACBase;
private NodeRef catACOne;
private NodeRef catACTwo;
private NodeRef catACThree;
private FullTextSearchIndexer luceneFTS;
private DictionaryDAO dictionaryDAO;
private String TEST_NAMESPACE = "http://www.alfresco.org/test/lucenecategorytest";
private QName regionCategorisationQName;
private QName assetClassCategorisationQName;
private QName investmentRegionCategorisationQName;
private QName marketingRegionCategorisationQName;
private NodeRef catRBase;
private NodeRef catROne;
private NodeRef catRTwo;
private NodeRef catRThree;
private SearchService searcher;
private LuceneIndexerAndSearcher indexerAndSearcher;
private CategoryService categoryService;
public LuceneCategoryTest()
{
super();
}
public LuceneCategoryTest(String arg0)
{
super(arg0);
}
public void setUp() throws Exception
{
nodeService = (NodeService)ctx.getBean("dbNodeService");
luceneIndexLock = (LuceneIndexLock)ctx.getBean("luceneIndexLock");
dictionaryService = (DictionaryService)ctx.getBean("dictionaryService");
luceneFTS = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
dictionaryDAO = (DictionaryDAO) ctx.getBean("dictionaryDAO");
searcher = (SearchService) ctx.getBean("searchService");
indexerAndSearcher = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
categoryService = (CategoryService) ctx.getBean("categoryService");
serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
createTestTypes();
TransactionService transactionService = serviceRegistry.getTransactionService();
UserTransaction tx = transactionService.getUserTransaction();
tx.begin();
StoreRef storeRef = nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
rootNodeRef = nodeService.getRootNode(storeRef);
n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"), ContentModel.TYPE_CONTAINER).getChildRef();
nodeService.setProperty(n1, QName.createQName("{namespace}property-1"), "value-1");
n2 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}two"), ContentModel.TYPE_CONTAINER).getChildRef();
nodeService.setProperty(n2, QName.createQName("{namespace}property-1"), "value-1");
nodeService.setProperty(n2, QName.createQName("{namespace}property-2"), "value-2");
n3 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}three"), ContentModel.TYPE_CONTAINER).getChildRef();
n4 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}four"), ContentModel.TYPE_CONTAINER).getChildRef();
n5 = nodeService.createNode(n1, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}five"), ContentModel.TYPE_CONTAINER).getChildRef();
n6 = nodeService.createNode(n1, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}six"), ContentModel.TYPE_CONTAINER).getChildRef();
n7 = nodeService.createNode(n2, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}seven"), ContentModel.TYPE_CONTAINER).getChildRef();
n8 = nodeService.createNode(n2, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-2"), ContentModel.TYPE_CONTAINER).getChildRef();
n9 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}nine"), ContentModel.TYPE_CONTAINER).getChildRef();
n10 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}ten"), ContentModel.TYPE_CONTAINER).getChildRef();
n11 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eleven"), ContentModel.TYPE_CONTAINER).getChildRef();
n12 = nodeService.createNode(n5, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}twelve"), ContentModel.TYPE_CONTAINER).getChildRef();
n13 = nodeService.createNode(n12, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}thirteen"), ContentModel.TYPE_CONTAINER).getChildRef();
n14 = nodeService.createNode(n13, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}fourteen"), ContentModel.TYPE_CONTAINER).getChildRef();
nodeService.addChild(rootNodeRef, n8, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-0"));
nodeService.addChild(n1, n8, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}eight-1"));
nodeService.addChild(n2, n13, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}link"));
nodeService.addChild(n1, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
nodeService.addChild(n2, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
nodeService.addChild(n5, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
nodeService.addChild(n6, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
nodeService.addChild(n12, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
nodeService.addChild(n13, n14, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}common"));
// Categories
catContainer = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categoryContainer"), ContentModel.TYPE_CONTAINER).getChildRef();
catRoot = nodeService.createNode(catContainer, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "categoryRoot"), ContentModel.TYPE_CATEGORYROOT).getChildRef();
catRBase = nodeService.createNode(catRoot, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "Region"), ContentModel.TYPE_CATEGORY).getChildRef();
catROne = nodeService.createNode(catRBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Europe"), ContentModel.TYPE_CATEGORY).getChildRef();
catRTwo = nodeService.createNode(catRBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "RestOfWorld"), ContentModel.TYPE_CATEGORY).getChildRef();
catRThree = nodeService.createNode(catRTwo, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "US"), ContentModel.TYPE_CATEGORY).getChildRef();
nodeService.addChild(catRoot, catRBase, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "InvestmentRegion"));
nodeService.addChild(catRoot, catRBase, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "MarketingRegion"));
catACBase = nodeService.createNode(catRoot, ContentModel.ASSOC_CATEGORIES, QName.createQName(TEST_NAMESPACE, "AssetClass"), ContentModel.TYPE_CATEGORY).getChildRef();
catACOne = nodeService.createNode(catACBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Fixed"), ContentModel.TYPE_CATEGORY).getChildRef();
catACTwo = nodeService.createNode(catACBase, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "Equity"), ContentModel.TYPE_CATEGORY).getChildRef();
catACThree = nodeService.createNode(catACTwo, ContentModel.ASSOC_SUBCATEGORIES, QName.createQName(TEST_NAMESPACE, "SpecialEquity"), ContentModel.TYPE_CATEGORY).getChildRef();
nodeService.addAspect(n1, assetClassCategorisationQName, createMap("assetClass", catACBase));
nodeService.addAspect(n1, regionCategorisationQName, createMap("region", catRBase));
nodeService.addAspect(n2, assetClassCategorisationQName, createMap("assetClass", catACOne));
nodeService.addAspect(n3, assetClassCategorisationQName, createMap("assetClass", catACOne));
nodeService.addAspect(n4, assetClassCategorisationQName, createMap("assetClass", catACOne));
nodeService.addAspect(n5, assetClassCategorisationQName, createMap("assetClass", catACOne));
nodeService.addAspect(n6, assetClassCategorisationQName, createMap("assetClass", catACOne));
nodeService.addAspect(n7, assetClassCategorisationQName, createMap("assetClass", catACTwo));
nodeService.addAspect(n8, assetClassCategorisationQName, createMap("assetClass", catACTwo));
nodeService.addAspect(n9, assetClassCategorisationQName, createMap("assetClass", catACTwo));
nodeService.addAspect(n10, assetClassCategorisationQName, createMap("assetClass", catACTwo));
nodeService.addAspect(n11, assetClassCategorisationQName, createMap("assetClass", catACTwo));
nodeService.addAspect(n12, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo));
nodeService.addAspect(n13, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo, catACThree));
nodeService.addAspect(n14, assetClassCategorisationQName, createMap("assetClass", catACOne, catACTwo));
nodeService.addAspect(n2, regionCategorisationQName, createMap("region", catROne));
nodeService.addAspect(n3, regionCategorisationQName, createMap("region", catROne));
nodeService.addAspect(n4, regionCategorisationQName, createMap("region", catRTwo));
nodeService.addAspect(n5, regionCategorisationQName, createMap("region", catRTwo));
nodeService.addAspect(n5, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
nodeService.addAspect(n5, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
nodeService.addAspect(n6, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
nodeService.addAspect(n7, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
nodeService.addAspect(n8, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
nodeService.addAspect(n9, investmentRegionCategorisationQName, createMap("investmentRegion", catRBase));
nodeService.addAspect(n10, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
nodeService.addAspect(n11, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
nodeService.addAspect(n12, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
nodeService.addAspect(n13, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
nodeService.addAspect(n14, marketingRegionCategorisationQName, createMap("marketingRegion", catRBase));
tx.commit();
}
private HashMap<QName, Serializable> createMap(String name, NodeRef[] nodeRefs)
{
HashMap<QName, Serializable> map = new HashMap<QName, Serializable>();
Serializable value = (Serializable) Arrays.asList(nodeRefs);
map.put(QName.createQName(TEST_NAMESPACE, name), value);
return map;
}
private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef)
{
return createMap(name, new NodeRef[]{nodeRef});
}
private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef1, NodeRef nodeRef2)
{
return createMap(name, new NodeRef[]{nodeRef1, nodeRef2});
}
private HashMap<QName, Serializable> createMap(String name, NodeRef nodeRef1, NodeRef nodeRef2, NodeRef nodeRef3)
{
return createMap(name, new NodeRef[]{nodeRef1, nodeRef2, nodeRef3});
}
private void createTestTypes()
{
M2Model model = M2Model.createModel("test:lucenecategory");
model.createNamespace(TEST_NAMESPACE, "test");
model.createImport(NamespaceService.DICTIONARY_MODEL_1_0_URI, NamespaceService.DICTIONARY_MODEL_PREFIX);
model.createImport(NamespaceService.CONTENT_MODEL_1_0_URI, NamespaceService.CONTENT_MODEL_PREFIX);
regionCategorisationQName = QName.createQName(TEST_NAMESPACE, "Region");
M2Aspect generalCategorisation = model.createAspect("test:" + regionCategorisationQName.getLocalName());
generalCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
M2Property genCatProp = generalCategorisation.createProperty("test:region");
genCatProp.setIndexed(true);
genCatProp.setIndexedAtomically(true);
genCatProp.setMandatory(true);
genCatProp.setMultiValued(true);
genCatProp.setStoredInIndex(true);
genCatProp.setTokenisedInIndex(true);
genCatProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());
assetClassCategorisationQName = QName.createQName(TEST_NAMESPACE, "AssetClass");
M2Aspect assetClassCategorisation = model.createAspect("test:" + assetClassCategorisationQName.getLocalName());
assetClassCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
M2Property acProp = assetClassCategorisation.createProperty("test:assetClass");
acProp.setIndexed(true);
acProp.setIndexedAtomically(true);
acProp.setMandatory(true);
acProp.setMultiValued(true);
acProp.setStoredInIndex(true);
acProp.setTokenisedInIndex(true);
acProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());
investmentRegionCategorisationQName = QName.createQName(TEST_NAMESPACE, "InvestmentRegion");
M2Aspect investmentRegionCategorisation = model.createAspect("test:" + investmentRegionCategorisationQName.getLocalName());
investmentRegionCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
M2Property irProp = investmentRegionCategorisation.createProperty("test:investmentRegion");
irProp.setIndexed(true);
irProp.setIndexedAtomically(true);
irProp.setMandatory(true);
irProp.setMultiValued(true);
irProp.setStoredInIndex(true);
irProp.setTokenisedInIndex(true);
irProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());
marketingRegionCategorisationQName = QName.createQName(TEST_NAMESPACE, "MarketingRegion");
M2Aspect marketingRegionCategorisation = model.createAspect("test:" + marketingRegionCategorisationQName.getLocalName());
marketingRegionCategorisation.setParentName("cm:" + ContentModel.ASPECT_CLASSIFIABLE.getLocalName());
M2Property mrProp = marketingRegionCategorisation.createProperty("test:marketingRegion");
mrProp.setIndexed(true);
mrProp.setIndexedAtomically(true);
mrProp.setMandatory(true);
mrProp.setMultiValued(true);
mrProp.setStoredInIndex(true);
mrProp.setTokenisedInIndex(true);
mrProp.setType("d:" + DataTypeDefinition.CATEGORY.getLocalName());
dictionaryDAO.putModel(model);
}
private void buildBaseIndex()
{
LuceneIndexerImpl indexer = LuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta" + System.currentTimeMillis() + "_" + (new Random().nextInt()), indexerAndSearcher);
indexer.setNodeService(nodeService);
indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setDictionaryService(dictionaryService);
indexer.setLuceneFullTextSearchIndexer(luceneFTS);
//indexer.clearIndex();
indexer.createNode(new ChildAssociationRef(null, null, null, rootNodeRef));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}one"), n1));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}two"), n2));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}three"), n3));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}four"), n4));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}categoryContainer"), catContainer));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, catContainer, QName.createQName("{cat}categoryRoot"), catRoot));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, catRoot, QName.createQName(TEST_NAMESPACE, "AssetClass"), catACBase));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACBase, QName.createQName(TEST_NAMESPACE, "Fixed"), catACOne));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACBase, QName.createQName(TEST_NAMESPACE, "Equity"), catACTwo));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catACTwo, QName.createQName(TEST_NAMESPACE, "SpecialEquity"), catACThree));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, catRoot, QName.createQName(TEST_NAMESPACE, "Region"), catRBase));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRBase, QName.createQName(TEST_NAMESPACE, "Europe"), catROne));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRBase, QName.createQName(TEST_NAMESPACE, "RestOfWorld"), catRTwo));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_SUBCATEGORIES, catRTwo, QName.createQName(TEST_NAMESPACE, "US"), catRThree));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n1, QName.createQName("{namespace}five"), n5));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n1, QName.createQName("{namespace}six"), n6));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n2, QName.createQName("{namespace}seven"), n7));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n2, QName.createQName("{namespace}eight"), n8));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}nine"), n9));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}ten"), n10));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}eleven"), n11));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n5, QName.createQName("{namespace}twelve"), n12));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n12, QName.createQName("{namespace}thirteen"), n13));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CATEGORIES, n13, QName.createQName("{namespace}fourteen"), n14));
indexer.prepare();
indexer.commit();
}
public void testMulti() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
UserTransaction tx = transactionService.getUserTransaction();
tx.begin();
buildBaseIndex();
LuceneSearcherImpl searcher = LuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver(""));
ResultSet results;
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"//*\" AND (PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:MarketingRegion/member\")", null, null);
//printPaths(results);
assertEquals(9, results.length());
results.close();
tx.rollback();
}
public void testBasic() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
UserTransaction tx = transactionService.getUserTransaction();
tx.begin();
buildBaseIndex();
LuceneSearcherImpl searcher = LuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver(""));
ResultSet results;
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion\"", null, null);
//printPaths(results);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion//member\"", null, null);
//printPaths(results);
assertEquals(6, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/member\" ", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Fixed\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Equity\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Fixed\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:*\"", null, null);
assertEquals(2, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass//test:*\"", null, null);
assertEquals(3, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
//printPaths(results);
assertEquals(8, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\"", null, null);
//printPaths(results);
assertEquals(8, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member//.\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member//*\"", null, null);
assertEquals(0, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/test:SpecialEquity/member\"", null, null);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/test:AssetClass/test:Equity/member\" AND +PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
//printPaths(results);
assertEquals(3, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:AssetClass/test:Fixed/member\"", null, null);
//printPaths(results);
assertEquals(13, results.length());
results.close();
// Region
assertEquals(4, nodeService.getChildAssocs(catRoot).size());
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region\"", null, null);
//printPaths(results);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/member\"", null, null);
//printPaths(results);
assertEquals(1, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/test:Europe/member\"", null, null);
//printPaths(results);
assertEquals(2, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region/test:RestOfWorld/member\"", null, null);
//printPaths(results);
assertEquals(2, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:Region//member\"", null, null);
//printPaths(results);
assertEquals(5, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:InvestmentRegion//member\"", null, null);
//printPaths(results);
assertEquals(5, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:MarketingRegion//member\"", null, null);
//printPaths(results);
assertEquals(6, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/test:AssetClass/test:Fixed/member\" AND +PATH:\"/test:Region/test:Europe/member\"", null, null);
//printPaths(results);
assertEquals(2, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "+PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/test:Fixed/member\" AND +PATH:\"/cm:categoryContainer/cm:categoryRoot/test:Region/test:Europe/member\"", null, null);
//printPaths(results);
assertEquals(2, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/test:AssetClass/test:Equity/member\" PATH:\"/test:MarketingRegion/member\"", null, null);
//printPaths(results);
assertEquals(9, results.length());
results.close();
tx.rollback();
}
public void testCategoryServiceImpl() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
UserTransaction tx = transactionService.getUserTransaction();
tx.begin();
buildBaseIndex();
LuceneSearcherImpl searcher = LuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setNodeService(nodeService);
searcher.setDictionaryService(dictionaryService);
searcher.setNamespacePrefixResolver(getNamespacePrefixReolsver(""));
ResultSet
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/*\" ", null, null);
assertEquals(3, results.length());
results.close();
results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"/cm:categoryContainer/cm:categoryRoot/test:AssetClass/member\" ", null, null);
assertEquals(1, results.length());
results.close();
LuceneCategoryServiceImpl impl = new LuceneCategoryServiceImpl();
impl.setNodeService(nodeService);
impl.setNamespacePrefixResolver(getNamespacePrefixReolsver(""));
impl.setIndexerAndSearcher(indexerAndSearcher);
impl.setDictionaryService(dictionaryService);
Collection<ChildAssociationRef>
result = impl.getChildren(catACBase , CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE);
assertEquals(1, result.size());
result = impl.getChildren(catACBase , CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE);
assertEquals(3, result.size());
result = impl.getChildren(catACBase , CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE);
assertEquals(2, result.size());
result = impl.getChildren(catACBase , CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY);
assertEquals(18, result.size());
result = impl.getChildren(catACBase , CategoryService.Mode.ALL, CategoryService.Depth.ANY);
assertEquals(21, result.size());
result = impl.getChildren(catACBase , CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY);
assertEquals(3, result.size());
result = impl.getClassifications(rootNodeRef.getStoreRef());
assertEquals(4, result.size());
result = impl.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE);
assertEquals(2, result.size());
Collection<QName> aspects = impl.getClassificationAspects();
assertEquals(6, aspects.size());
tx.rollback();
}
private NamespacePrefixResolver getNamespacePrefixReolsver(String defaultURI)
{
DynamicNamespacePrefixResolver nspr = new DynamicNamespacePrefixResolver(null);
nspr.registerNamespace(NamespaceService.CONTENT_MODEL_PREFIX, NamespaceService.CONTENT_MODEL_1_0_URI);
nspr.registerNamespace("namespace", "namespace");
nspr.registerNamespace("test", TEST_NAMESPACE);
nspr.registerNamespace(NamespaceService.DEFAULT_PREFIX, defaultURI);
return nspr;
}
public void testCategoryService() throws Exception
{
TransactionService transactionService = serviceRegistry.getTransactionService();
UserTransaction tx = transactionService.getUserTransaction();
tx.begin();
buildBaseIndex();
assertEquals(1, categoryService.getChildren(catACBase , CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE).size());
assertEquals(2, categoryService.getChildren(catACBase , CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE).size());
assertEquals(3, categoryService.getChildren(catACBase , CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE).size());
assertEquals(18, categoryService.getChildren(catACBase , CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY).size());
assertEquals(3, categoryService.getChildren(catACBase , CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY).size());
assertEquals(21, categoryService.getChildren(catACBase , CategoryService.Mode.ALL, CategoryService.Depth.ANY).size());
assertEquals(4, categoryService.getClassifications(rootNodeRef.getStoreRef()).size());
assertEquals(2, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
assertEquals(6, categoryService.getClassificationAspects().size());
assertEquals(2, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
NodeRef newRoot = categoryService.createRootCategory(rootNodeRef.getStoreRef(),QName.createQName(TEST_NAMESPACE, "AssetClass"), "Fruit");
tx.commit();
tx = transactionService.getUserTransaction();
tx.begin();
assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
assertEquals(4, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
NodeRef newCat = categoryService.createCategory(newRoot, "Banana");
tx.commit();
tx = transactionService.getUserTransaction();
tx.begin();
assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
assertEquals(5, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
categoryService.deleteCategory(newCat);
tx.commit();
tx = transactionService.getUserTransaction();
tx.begin();
assertEquals(3, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
assertEquals(4, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
categoryService.deleteCategory(newRoot);
tx.commit();
tx = transactionService.getUserTransaction();
tx.begin();
assertEquals(2, categoryService.getRootCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass")).size());
assertEquals(2, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.IMMEDIATE).size());
assertEquals(3, categoryService.getCategories(rootNodeRef.getStoreRef(), QName.createQName(TEST_NAMESPACE, "AssetClass"), CategoryService.Depth.ANY).size());
tx.rollback();
}
private int getTotalScore(ResultSet results)
{
int totalScore = 0;
for(ResultSetRow row: results)
{
totalScore += row.getScore();
}
return totalScore;
}
}


@@ -20,7 +20,7 @@ import java.io.File;
import junit.framework.TestCase;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2.LuceneIndexBackupComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.transaction.TransactionService;


@@ -138,6 +138,10 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
private int indexerMaxFieldLength;
private long writeLockTimeout;
private long commitLockTimeout;
/**
* Private constructor for the singleton TODO: FIt in with IOC
*/
@@ -831,8 +835,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
{
this.lockDirectory = lockDirectory;
// Set the lucene lock file via System property
// org.apache.lucene.lockdir
System.setProperty("org.apache.lucene.lockdir", lockDirectory);
// org.apache.lucene.lockDir
System.setProperty("org.apache.lucene.lockDir", lockDirectory);
// Make sure the lock directory exists
File lockDir = new File(lockDirectory);
if (!lockDir.exists())
@@ -871,12 +875,22 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
public void setWriteLockTimeout(long timeout)
{
IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
this.writeLockTimeout = timeout;
}
public void setCommitLockTimeout(long timeout)
{
IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
this.commitLockTimeout = timeout;
}
public long getCommitLockTimeout()
{
return commitLockTimeout;
}
public long getWriteLockTimeout()
{
return writeLockTimeout;
}
public void setLockPollInterval(long time)


@@ -77,6 +77,7 @@ import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.BooleanClause.Occur;
/**
* The implementation of the lucene based indexer. Supports basic transactional behaviour if used on its own.
@@ -1043,7 +1044,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
refs.add(ref);
if (delete)
{
reader.delete(doc);
reader.deleteDocument(doc);
}
}
}
@@ -1078,7 +1079,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
refs.add(ref);
if (delete)
{
reader.delete(doc);
reader.deleteDocument(doc);
}
}
}
@@ -1101,7 +1102,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
{
if (delete)
{
reader.delete(new Term("ID", nodeRef.toString()));
reader.deleteDocuments(new Term("ID", nodeRef.toString()));
}
refs.add(nodeRef);
if (cascade)
@@ -1116,7 +1117,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
refs.add(ref);
if (delete)
{
reader.delete(doc);
reader.deleteDocument(doc);
}
}
}
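The four reader.delete(...) substitutions above follow Lucene 2.0's renaming of the IndexReader deletion methods. A self-contained sketch of the same two calls, for reference:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;

class ReaderDeleteSketch
{
    // Illustrative only: delete(int) becomes deleteDocument(int), and delete(Term)
    // becomes deleteDocuments(Term), which removes every document containing the term.
    static void deleteByDocAndTerm(IndexReader reader, int doc, String nodeRef) throws IOException
    {
        reader.deleteDocument(doc);                      // was: reader.delete(doc)
        reader.deleteDocuments(new Term("ID", nodeRef)); // was: reader.delete(new Term("ID", nodeRef))
    }
}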
@@ -1238,8 +1239,8 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
paths.addAll(categoryPaths);
Document xdoc = new Document();
xdoc.add(new Field("ID", nodeRef.toString(), true, true, false));
xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), true, true, false));
xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
boolean isAtomic = true;
for (QName propertyName : properties.keySet())
{
@@ -1295,11 +1296,10 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
qNameBuffer.append(";/");
}
qNameBuffer.append(ISO9075.getXPathName(qNameRef.getQName()));
xdoc.add(new Field("PARENT", qNameRef.getParentRef().toString(), true, true, false));
xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), true,
false, false));
xdoc.add(new Field("PARENT", qNameRef.getParentRef().toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair
.getSecond()), true, true, false));
.getSecond()), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
}
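The Field changes throughout this file follow one pattern: the Lucene 1.4 constructor flags (store, index, token) and the Field.Text factory methods are replaced by the 2.0 Field.Store / Field.Index / Field.TermVector enums. A condensed sketch of the mapping, using field names taken from the hunks in this file:

import java.io.Reader;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

class FieldMappingSketch
{
    // Illustrative only: how the 1.4 boolean flags map onto the 2.0 enums.
    static Document example(String id, String path, Reader content)
    {
        Document doc = new Document();
        // 1.4: new Field("ID", id, true, true, false)    -> stored, indexed, not tokenised
        doc.add(new Field("ID", id, Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
        // 1.4: new Field("PATH", path, true, true, true)  -> stored, indexed, tokenised
        doc.add(new Field("PATH", path, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
        // 1.4: Field.Text("TEXT", reader)                 -> Reader-valued fields are tokenised and never stored
        doc.add(new Field("TEXT", content, Field.TermVector.NO));
        return doc;
    }
}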
@@ -1320,17 +1320,17 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
if (directPaths.contains(pair.getFirst()))
{
Document directoryEntry = new Document();
directoryEntry.add(new Field("ID", nodeRef.toString(), true, true, false));
directoryEntry.add(new Field("PATH", pathString, true, true, true));
directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
for (NodeRef parent : getParents(pair.getFirst()))
{
directoryEntry.add(new Field("ANCESTOR", parent.toString(), false, true, false));
directoryEntry.add(new Field("ANCESTOR", parent.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
directoryEntry.add(new Field("ISCONTAINER", "T", true, true, false));
directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef))))
{
directoryEntry.add(new Field("ISCATEGORY", "T", true, true, false));
directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
docs.add(directoryEntry);
@@ -1344,50 +1344,48 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
if (isRoot)
{
// TODO: Does the root element have a QName?
xdoc.add(new Field("ISCONTAINER", "T", true, true, false));
xdoc.add(new Field("PATH", "", true, true, true));
xdoc.add(new Field("QNAME", "", true, true, true));
xdoc.add(new Field("ISROOT", "T", false, true, false));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), true, false,
false));
xdoc.add(new Field("ISNODE", "T", false, true, false));
xdoc.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("PATH", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("QNAME", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
docs.add(xdoc);
}
else
// not a root node
{
xdoc.add(new Field("QNAME", qNameBuffer.toString(), true, true, true));
xdoc.add(new Field("QNAME", qNameBuffer.toString(),Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
// xdoc.add(new Field("PARENT", parentBuffer.toString(), true, true,
// true));
ChildAssociationRef primary = nodeService.getPrimaryParent(nodeRef);
xdoc.add(new Field("PRIMARYPARENT", primary.getParentRef().toString(), true, true, false));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), true, false,
false));
xdoc.add(new Field("PRIMARYPARENT", primary.getParentRef().toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
QName typeQName = nodeService.getType(nodeRef);
xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), true, true, false));
xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
for (QName classRef : nodeService.getAspects(nodeRef))
{
xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), true, true, false));
xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
xdoc.add(new Field("ISROOT", "F", false, true, false));
xdoc.add(new Field("ISNODE", "T", false, true, false));
xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
if (isAtomic || indexAllProperties)
{
xdoc.add(new Field("FTSSTATUS", "Clean", false, true, false));
xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
else
{
if (isNew)
{
xdoc.add(new Field("FTSSTATUS", "New", false, true, false));
xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
else
{
xdoc.add(new Field("FTSSTATUS", "Dirty", false, true, false));
xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
}
@@ -1491,7 +1489,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
continue;
}
// store mimetype in index - even if content does not index it is useful
doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), false, true, false));
doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
ContentReader reader = contentService.getReader(nodeRef, propertyName);
if (reader != null && reader.exists())
@@ -1517,8 +1515,8 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
// don't index from the reader
readerReady = false;
// not indexed: no transformation
doc.add(Field.Text("TEXT", NOT_INDEXED_NO_TRANSFORMATION));
doc.add(Field.Text(attributeName, NOT_INDEXED_NO_TRANSFORMATION));
doc.add(new Field("TEXT", NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
}
else if (indexAtomicPropertiesOnly
&& transformer.getTransformationTime() > maxAtomicTransformationTime)
@@ -1552,8 +1550,8 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
readerReady = false;
// not indexed: transformation
// failed
doc.add(Field.Text("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED));
doc.add(Field.Text(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED));
doc.add(new Field("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
}
@@ -1571,7 +1569,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
{
isr = new InputStreamReader(ris);
}
doc.add(Field.Text("TEXT", isr));
doc.add(new Field("TEXT", isr, Field.TermVector.NO));
ris = reader.getReader().getContentInputStream();
try
@@ -1583,9 +1581,9 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
isr = new InputStreamReader(ris);
}
doc.add(Field.Text("@"
doc.add(new Field("@"
+ QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName
.getLocalName())), isr));
.getLocalName())), isr, Field.TermVector.NO));
}
}
else
@@ -1599,13 +1597,31 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
+ (reader == null ? " --- " : Boolean.toString(reader.exists())));
}
// not indexed: content missing
doc.add(Field.Text("TEXT", NOT_INDEXED_CONTENT_MISSING));
doc.add(Field.Text(attributeName, NOT_INDEXED_CONTENT_MISSING));
doc.add(new Field("TEXT", NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
doc.add(new Field(attributeName, NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
}
}
else
{
doc.add(new Field(attributeName, strValue, store, index, tokenise));
Field.Store fieldStore = store ? Field.Store.YES : Field.Store.NO;
Field.Index fieldIndex;
if(index )
{
if(tokenise)
{
fieldIndex = Field.Index.TOKENIZED;
}
else
{
fieldIndex = Field.Index.UN_TOKENIZED;
}
}
else
{
fieldIndex = Field.Index.NO;
}
doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
}
}
@@ -1761,8 +1777,8 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
toFTSIndex = new ArrayList<Helper>(size);
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "Dirty")), false, false);
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "New")), false, false);
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "Dirty")), Occur.SHOULD);
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "New")), Occur.SHOULD);
int count = 0;
Searcher searcher = null;
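The BooleanQuery change above (and the matching ones in LuceneQueryParser below) swaps the 1.4 add(query, required, prohibited) boolean pair for the 2.0 BooleanClause.Occur constants. A minimal sketch of the same FTS-status query:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

class OccurSketch
{
    // Illustrative only: (false, false) -> Occur.SHOULD, (true, false) -> Occur.MUST,
    // (false, true) -> Occur.MUST_NOT.
    static BooleanQuery dirtyOrNew()
    {
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("FTSSTATUS", "Dirty")), Occur.SHOULD); // was: add(q, false, false)
        query.add(new TermQuery(new Term("FTSSTATUS", "New")), Occur.SHOULD);
        return query;
    }
}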


@@ -37,6 +37,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RangeQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.saxpath.SAXPathException;
import com.werken.saxpath.XPathReader;
@@ -198,7 +199,7 @@ public class LuceneQueryParser extends QueryParser
for (QName qname : subclasses)
{
TermQuery termQuery = new TermQuery(new Term(field, qname.toString()));
booleanQuery.add(termQuery, false, false);
booleanQuery.add(termQuery, Occur.SHOULD);
}
return booleanQuery;
}
@@ -244,7 +245,7 @@ public class LuceneQueryParser extends QueryParser
for (QName qname : subclasses)
{
TermQuery termQuery = new TermQuery(new Term(field, qname.toString()));
booleanQuery.add(termQuery, false, false);
booleanQuery.add(termQuery, Occur.SHOULD);
}
return booleanQuery;
}


@@ -1,652 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.search.impl.lucene;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.alfresco.repo.search.CannedQueryDef;
import org.alfresco.repo.search.EmptyResultSet;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.NodeSearcher;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.XPathException;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.search.QueryParameter;
import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ISO9075;
import org.alfresco.util.SearchLanguageConversion;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.saxpath.SAXPathException;
import com.werken.saxpath.XPathReader;
/**
* The Lucene implementation of Searcher At the moment we support only lucene
* based queries.
*
* TODO: Support for other query languages
*
* @author andyh
*
*/
public class LuceneSearcherImpl extends LuceneBase implements LuceneSearcher
{
/**
* Default field name
*/
private static final String DEFAULT_FIELD = "TEXT";
private NamespacePrefixResolver namespacePrefixResolver;
private NodeService nodeService;
private DictionaryService dictionaryService;
private QueryRegisterComponent queryRegister;
private LuceneIndexer indexer;
/*
* Searcher implementation
*/
/**
* Get an initialised searcher for the store and transaction Normally we do
* not search against a a store and delta. Currently only gets the searcher
* against the main index.
*
* @param storeRef
* @param deltaId
* @return
*/
public static LuceneSearcherImpl getSearcher(StoreRef storeRef, LuceneIndexer indexer, LuceneConfig config)
{
LuceneSearcherImpl searcher = new LuceneSearcherImpl();
searcher.setLuceneConfig(config);
try
{
searcher.initialise(storeRef, indexer == null ? null : indexer.getDeltaId(), false, false);
searcher.indexer = indexer;
}
catch (LuceneIndexException e)
{
throw new SearcherException(e);
}
return searcher;
}
/**
* Get an intialised searcher for the store. No transactional ammendsmends
* are searched.
*
*
* @param storeRef
* @return
*/
public static LuceneSearcherImpl getSearcher(StoreRef storeRef, LuceneConfig config)
{
return getSearcher(storeRef, null, config);
}
public void setNamespacePrefixResolver(NamespacePrefixResolver namespacePrefixResolver)
{
this.namespacePrefixResolver = namespacePrefixResolver;
}
public boolean indexExists()
{
return mainIndexExists();
}
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
public void setQueryRegister(QueryRegisterComponent queryRegister)
{
this.queryRegister = queryRegister;
}
public ResultSet query(StoreRef store, String language, String queryString, Path[] queryOptions,
QueryParameterDefinition[] queryParameterDefinitions) throws SearcherException
{
SearchParameters sp = new SearchParameters();
sp.addStore(store);
sp.setLanguage(language);
sp.setQuery(queryString);
if (queryOptions != null)
{
for (Path path : queryOptions)
{
sp.addAttrbutePath(path);
}
}
if (queryParameterDefinitions != null)
{
for (QueryParameterDefinition qpd : queryParameterDefinitions)
{
sp.addQueryParameterDefinition(qpd);
}
}
sp.excludeDataInTheCurrentTransaction(true);
return query(sp);
}
public ResultSet query(SearchParameters searchParameters)
{
if (searchParameters.getStores().size() != 1)
{
throw new IllegalStateException("Only one store can be searched at present");
}
String parameterisedQueryString;
if (searchParameters.getQueryParameterDefinitions().size() > 0)
{
Map<QName, QueryParameterDefinition> map = new HashMap<QName, QueryParameterDefinition>();
for (QueryParameterDefinition qpd : searchParameters.getQueryParameterDefinitions())
{
map.put(qpd.getQName(), qpd);
}
parameterisedQueryString = parameterise(searchParameters.getQuery(), map, null, namespacePrefixResolver);
}
else
{
parameterisedQueryString = searchParameters.getQuery();
}
if (searchParameters.getLanguage().equalsIgnoreCase(SearchService.LANGUAGE_LUCENE))
{
try
{
int defaultOperator;
if (searchParameters.getDefaultOperator() == SearchParameters.AND)
{
defaultOperator = LuceneQueryParser.DEFAULT_OPERATOR_AND;
}
else
{
defaultOperator = LuceneQueryParser.DEFAULT_OPERATOR_OR;
}
Query query = LuceneQueryParser.parse(parameterisedQueryString, DEFAULT_FIELD, new LuceneAnalyser(
dictionaryService), namespacePrefixResolver, dictionaryService, defaultOperator);
Searcher searcher = getSearcher(indexer);
if (searcher == null)
{
// no index - return an empty result set
return new EmptyResultSet();
}
Hits hits;
if (searchParameters.getSortDefinitions().size() > 0)
{
int index = 0;
SortField[] fields = new SortField[searchParameters.getSortDefinitions().size()];
for (SearchParameters.SortDefinition sd : searchParameters.getSortDefinitions())
{
switch (sd.getSortType())
{
case FIELD:
fields[index++] = new SortField(sd.getField(), SortField.STRING, !sd.isAscending());
break;
case DOCUMENT:
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
break;
case SCORE:
fields[index++] = new SortField(null, SortField.SCORE, !sd.isAscending());
break;
}
}
hits = searcher.search(query, new Sort(fields));
}
else
{
hits = searcher.search(query);
}
return new LuceneResultSet(hits, searcher, nodeService, searchParameters.getAttributePaths().toArray(
new Path[0]), searchParameters);
}
catch (ParseException e)
{
throw new SearcherException("Failed to parse query: " + parameterisedQueryString, e);
}
catch (IOException e)
{
throw new SearcherException("IO exception during search", e);
}
}
else if (searchParameters.getLanguage().equalsIgnoreCase(SearchService.LANGUAGE_XPATH))
{
try
{
XPathReader reader = new XPathReader();
LuceneXPathHandler handler = new LuceneXPathHandler();
handler.setNamespacePrefixResolver(namespacePrefixResolver);
handler.setDictionaryService(dictionaryService);
// TODO: The handler should receive the query parameters to use when
// building its Lucene query. At the moment, XPath-style parameters in
// the PATH expression are not supported.
reader.setXPathHandler(handler);
reader.parse(parameterisedQueryString);
Query query = handler.getQuery();
Searcher searcher = getSearcher(null);
if (searcher == null)
{
// no index - return an empty result set
return new EmptyResultSet();
}
Hits hits = searcher.search(query);
return new LuceneResultSet(hits, searcher, nodeService, searchParameters.getAttributePaths().toArray(
new Path[0]), searchParameters);
}
catch (SAXPathException e)
{
throw new SearcherException("Failed to parse query: " + searchParameters.getQuery(), e);
}
catch (IOException e)
{
throw new SearcherException("IO exception during search", e);
}
}
else
{
throw new SearcherException("Unknown query language: " + searchParameters.getLanguage());
}
}
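// Example (a sketch, not part of the original class) of driving the method above directly,
// with an explicit default operator and sort; the searcher and storeRef are assumed to be
// set up as in the factory-method sketch earlier in this class.
//
//     SearchParameters sp = new SearchParameters();
//     sp.addStore(storeRef);
//     sp.setLanguage(SearchService.LANGUAGE_LUCENE);
//     sp.setQuery("TEXT:alfresco");
//     sp.setDefaultOperator(SearchParameters.AND);
//     sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
//     ResultSet results = searcher.query(sp);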
public ResultSet query(StoreRef store, String language, String query)
{
return query(store, language, query, null, null);
}
public ResultSet query(StoreRef store, String language, String query,
QueryParameterDefinition[] queryParameterDefintions)
{
return query(store, language, query, null, queryParameterDefintions);
}
public ResultSet query(StoreRef store, String language, String query, Path[] attributePaths)
{
return query(store, language, query, attributePaths, null);
}
public ResultSet query(StoreRef store, QName queryId, QueryParameter[] queryParameters)
{
CannedQueryDef definition = queryRegister.getQueryDefinition(queryId);
// Do parameter replacement
// As Lucene phrases are tokenised, it is correct to just do straight
// string replacement: the string will be formatted by the tokeniser.
//
// For non-phrase queries this is incorrect, but string replacement is
// probably the best we can do. As numbers and text are indexed specially,
// direct term queries only make sense against textual data.
checkParameters(definition, queryParameters);
String queryString = parameterise(definition.getQuery(), definition.getQueryParameterMap(), queryParameters,
definition.getNamespacePrefixResolver());
return query(store, definition.getLanguage(), queryString, null, null);
}
/**
* The definitions must provide a default value or, if not, there must be a
* parameter to provide the value.
*
* @param definition
* @param queryParameters
* @throws QueryParameterisationException
*/
private void checkParameters(CannedQueryDef definition, QueryParameter[] queryParameters)
throws QueryParameterisationException
{
List<QName> missing = new ArrayList<QName>();
Set<QName> parameterQNameSet = new HashSet<QName>();
if (queryParameters != null)
{
for (QueryParameter parameter : queryParameters)
{
parameterQNameSet.add(parameter.getQName());
}
}
for (QueryParameterDefinition parameterDefinition : definition.getQueryParameterDefs())
{
if (!parameterDefinition.hasDefaultValue())
{
if (!parameterQNameSet.contains(parameterDefinition.getQName()))
{
missing.add(parameterDefinition.getQName());
}
}
}
if (missing.size() > 0)
{
StringBuilder buffer = new StringBuilder(128);
buffer.append("The query is missing values for the following parameters: ");
for (QName qName : missing)
{
buffer.append(qName);
buffer.append(", ");
}
// strip the trailing ", " (delete is end-exclusive, so remove the last two characters)
buffer.delete(buffer.length() - 2, buffer.length());
throw new QueryParameterisationException(buffer.toString());
}
}
/*
* Parameterise the query string. It is not clear whether Lucene special
* characters need to be escaped here: the parameter values are used to build
* the query, so their contents should already have been escaped if required.
* It would be better to supply the parameters directly and decide what to do
* then. TODO: conditional query escapement - maybe we should have a parameter
* type that is not escaped.
*/
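// Worked example (illustrative; the "cm" prefix is assumed to be registered with the
// namespace prefix resolver): given the canned query string
//     TEXT:${cm:name}
// and a supplied parameter cm:name = "alfresco", the loop below rewrites the buffer to
//     TEXT:alfresco
// Values for a repeated parameter are consumed in order and cycle when exhausted; the
// definition's default value is only used when no value was supplied at all.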
private String parameterise(String unparameterised, Map<QName, QueryParameterDefinition> map,
QueryParameter[] queryParameters, NamespacePrefixResolver nspr) throws QueryParameterisationException
{
Map<QName, List<Serializable>> valueMap = new HashMap<QName, List<Serializable>>();
if (queryParameters != null)
{
for (QueryParameter parameter : queryParameters)
{
List<Serializable> list = valueMap.get(parameter.getQName());
if (list == null)
{
list = new ArrayList<Serializable>();
valueMap.put(parameter.getQName(), list);
}
list.add(parameter.getValue());
}
}
Map<QName, ListIterator<Serializable>> iteratorMap = new HashMap<QName, ListIterator<Serializable>>();
List<QName> missing = new ArrayList<QName>(1);
StringBuilder buffer = new StringBuilder(unparameterised);
int index = 0;
while ((index = buffer.indexOf("${", index)) != -1)
{
int endIndex = buffer.indexOf("}", index);
String qNameString = buffer.substring(index + 2, endIndex);
QName key = QName.createQName(qNameString, nspr);
QueryParameterDefinition parameterDefinition = map.get(key);
if (parameterDefinition == null)
{
missing.add(key);
buffer.replace(index, endIndex + 1, "");
}
else
{
ListIterator<Serializable> it = iteratorMap.get(key);
if ((it == null) || (!it.hasNext()))
{
List<Serializable> list = valueMap.get(key);
if ((list != null) && (list.size() > 0))
{
it = list.listIterator();
}
if (it != null)
{
iteratorMap.put(key, it);
}
}
String value;
if (it == null)
{
value = parameterDefinition.getDefault();
}
else
{
value = DefaultTypeConverter.INSTANCE.convert(String.class, it.next());
}
buffer.replace(index, endIndex + 1, value);
}
}
if (missing.size() > 0)
{
StringBuilder error = new StringBuilder();
error.append("The query uses the following parameters which are not defined: ");
for (QName qName : missing)
{
error.append(qName);
error.append(", ");
}
// strip the trailing ", " (delete is end-exclusive, so remove the last two characters)
error.delete(error.length() - 2, error.length());
throw new QueryParameterisationException(error.toString());
}
return buffer.toString();
}
/**
* @see org.alfresco.repo.search.impl.NodeSearcher
*/
public List<NodeRef> selectNodes(NodeRef contextNodeRef, String xpath, QueryParameterDefinition[] parameters,
NamespacePrefixResolver namespacePrefixResolver, boolean followAllParentLinks, String language)
throws InvalidNodeRefException, XPathException
{
NodeSearcher nodeSearcher = new NodeSearcher(nodeService, dictionaryService, this);
return nodeSearcher.selectNodes(contextNodeRef, xpath, parameters, namespacePrefixResolver,
followAllParentLinks, language);
}
/**
* @see org.alfresco.repo.search.impl.NodeSearcher
*/
public List<Serializable> selectProperties(NodeRef contextNodeRef, String xpath,
QueryParameterDefinition[] parameters, NamespacePrefixResolver namespacePrefixResolver,
boolean followAllParentLinks, String language) throws InvalidNodeRefException, XPathException
{
NodeSearcher nodeSearcher = new NodeSearcher(nodeService, dictionaryService, this);
return nodeSearcher.selectProperties(contextNodeRef, xpath, parameters, namespacePrefixResolver,
followAllParentLinks, language);
}
/**
* @return Returns true if the pattern is present, otherwise false.
*/
public boolean contains(NodeRef nodeRef, QName propertyQName, String googleLikePattern)
{
return contains(nodeRef, propertyQName, googleLikePattern, SearchParameters.Operator.OR);
}
/**
* @return Returns true if the pattern is present, otherwise false.
*/
public boolean contains(NodeRef nodeRef, QName propertyQName, String googleLikePattern,
SearchParameters.Operator defaultOperator)
{
ResultSet resultSet = null;
try
{
// build Lucene search string specific to the node
StringBuilder sb = new StringBuilder();
sb.append("+ID:\"").append(nodeRef.toString()).append("\" +(TEXT:(")
.append(googleLikePattern.toLowerCase()).append(") ");
if (propertyQName != null)
{
sb.append(" OR @").append(
LuceneQueryParser.escape(QName.createQName(propertyQName.getNamespaceURI(),
ISO9075.encode(propertyQName.getLocalName())).toString()));
sb.append(":(").append(googleLikePattern.toLowerCase()).append(")");
}
else
{
for (QName key : nodeService.getProperties(nodeRef).keySet())
{
sb.append(" OR @").append(
LuceneQueryParser.escape(QName.createQName(key.getNamespaceURI(),
ISO9075.encode(key.getLocalName())).toString()));
sb.append(":(").append(googleLikePattern.toLowerCase()).append(")");
}
}
sb.append(")");
SearchParameters sp = new SearchParameters();
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery(sb.toString());
sp.setDefaultOperator(defaultOperator);
sp.addStore(nodeRef.getStoreRef());
resultSet = this.query(sp);
boolean answer = resultSet.length() > 0;
return answer;
}
finally
{
if (resultSet != null)
{
resultSet.close();
}
}
}
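// For reference (a sketch of what the builder above produces, with the node id and
// namespace URI shortened): contains(nodeRef, cm:name, "banana") generates roughly
//     +ID:"workspace://SpacesStore/..." +(TEXT:(banana)  OR @\{...\}name:(banana))
// i.e. the hit must be the given node AND the pattern must match either the full text
// or the named (ISO9075-encoded, escaped) property.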
/**
* @return Returns true if the pattern is present, otherwise false.
*
* @see #setIndexer(Indexer)
* @see #setSearcher(SearchService)
*/
public boolean like(NodeRef nodeRef, QName propertyQName, String sqlLikePattern, boolean includeFTS)
{
if (propertyQName == null)
{
throw new IllegalArgumentException("Property QName is mandatory for the like expression");
}
StringBuilder sb = new StringBuilder(sqlLikePattern.length() * 3);
if (includeFTS)
{
// convert the SQL-like pattern into a Lucene-compatible string
String pattern = SearchLanguageConversion.convertXPathLikeToLucene(sqlLikePattern.toLowerCase());
// build Lucene search string specific to the node
sb = new StringBuilder();
sb.append("+ID:\"").append(nodeRef.toString()).append("\" +(");
// FTS or attribute matches
if (includeFTS)
{
sb.append("TEXT:(").append(pattern).append(") ");
}
if (propertyQName != null)
{
sb.append(" @").append(
LuceneQueryParser.escape(QName.createQName(propertyQName.getNamespaceURI(),
ISO9075.encode(propertyQName.getLocalName())).toString())).append(":(").append(pattern)
.append(")");
}
sb.append(")");
ResultSet resultSet = null;
try
{
resultSet = this.query(nodeRef.getStoreRef(), "lucene", sb.toString());
boolean answer = resultSet.length() > 0;
return answer;
}
finally
{
if (resultSet != null)
{
resultSet.close();
}
}
}
else
{
// convert the SQL-like pattern into a Lucene-compatible string
String pattern = SearchLanguageConversion.convertXPathLikeToRegex(sqlLikePattern.toLowerCase());
Serializable property = nodeService.getProperty(nodeRef, propertyQName);
if (property == null)
{
return false;
}
else
{
String propertyString = DefaultTypeConverter.INSTANCE.convert(String.class, nodeService.getProperty(
nodeRef, propertyQName));
return propertyString.toLowerCase().matches(pattern);
}
}
}
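// Note (an assumption about SearchLanguageConversion, stated here rather than verified):
// the two conversions above are expected to map the SQL/XPath wildcards "%" and "_" to
// Lucene "*" / "?" and to regex ".*" / "." respectively, so a pattern like "alf%" would
// become the Lucene query alf* in the FTS branch and the regex alf.* in the property branch.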
public List<NodeRef> selectNodes(NodeRef contextNodeRef, String xpath, QueryParameterDefinition[] parameters,
NamespacePrefixResolver namespacePrefixResolver, boolean followAllParentLinks)
throws InvalidNodeRefException, XPathException
{
return selectNodes(contextNodeRef, xpath, parameters, namespacePrefixResolver, followAllParentLinks,
SearchService.LANGUAGE_XPATH);
}
public List<Serializable> selectProperties(NodeRef contextNodeRef, String xpath,
QueryParameterDefinition[] parameters, NamespacePrefixResolver namespacePrefixResolver,
boolean followAllParentLinks) throws InvalidNodeRefException, XPathException
{
return selectProperties(contextNodeRef, xpath, parameters, namespacePrefixResolver, followAllParentLinks,
SearchService.LANGUAGE_XPATH);
}
}

View File

@@ -443,7 +443,7 @@ public class LuceneTest2 extends TestCase
tx3.commit();
}
public void testMTDeleteIssue() throws Exception
public void xtestMTDeleteIssue() throws Exception
{
luceneFTS.pause();
testTX.commit();
@@ -1109,7 +1109,7 @@ public class LuceneTest2 extends TestCase
luceneFTS.pause();
buildBaseIndex();
LuceneSearcherImpl searcher = LuceneSearcherImpl.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(rootNodeRef.getStoreRef(), indexerAndSearcher);
searcher.setDictionaryService(dictionaryService);
ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene",
@@ -1137,7 +1137,7 @@ public class LuceneTest2 extends TestCase
public void testNoOp() throws Exception
{
luceneFTS.pause();
LuceneIndexerImpl indexer = LuceneIndexerImpl.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(rootNodeRef.getStoreRef(), "delta"
+ System.currentTimeMillis() + "_1", indexerAndSearcher);
indexer.setNodeService(nodeService);

View File

@@ -21,6 +21,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.RangeQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.BooleanClause.Occur;
/**
* This class is generated by JavaCC. The only method that clients should need
@@ -219,8 +220,8 @@ public class QueryParser implements QueryParserConstants {
// unless it's already prohibited
if (clauses.size() > 0 && conj == CONJ_AND) {
BooleanClause c = (BooleanClause) clauses.elementAt(clauses.size()-1);
if (!c.prohibited)
c.required = true;
if (!c.isProhibited())
c.setOccur(Occur.MUST);
}
if (clauses.size() > 0 && operator == DEFAULT_OPERATOR_AND && conj == CONJ_OR) {
@@ -229,8 +230,8 @@ public class QueryParser implements QueryParserConstants {
// notice if the input is a OR b, first term is parsed as required; without
// this modification a OR b would be parsed as +a OR b
BooleanClause c = (BooleanClause) clauses.elementAt(clauses.size()-1);
if (!c.prohibited)
c.required = false;
if (!c.isProhibited())
c.setOccur(Occur.SHOULD);
}
// We might have been passed a null query; the term might have been
@@ -252,7 +253,16 @@ public class QueryParser implements QueryParserConstants {
prohibited = (mods == MOD_NOT);
required = (!prohibited && conj != CONJ_OR);
}
clauses.addElement(new BooleanClause(q, required, prohibited));
Occur occur = Occur.SHOULD;
if(prohibited)
{
occur = Occur.MUST_NOT;
}
if(required)
{
occur = Occur.MUST;
}
clauses.addElement(new BooleanClause(q, occur));
}
/**
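A minimal sketch, not taken from the Alfresco or Lucene sources, of the Lucene 2.0 BooleanClause.Occur pattern that the hunk above switches to, replacing the old public required/prohibited flags; the field name and terms are illustrative.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class OccurExample
{
    public static BooleanQuery build()
    {
        BooleanQuery query = new BooleanQuery();
        // required clause: previously required = true, prohibited = false
        query.add(new BooleanClause(new TermQuery(new Term("TEXT", "alfresco")), Occur.MUST));
        // optional clause: previously required = false, prohibited = false
        query.add(new BooleanClause(new TermQuery(new Term("TEXT", "lucene")), Occur.SHOULD));
        // prohibited clause: previously prohibited = true
        query.add(new BooleanClause(new TermQuery(new Term("TEXT", "draft")), Occur.MUST_NOT));
        return query;
    }
}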

View File

@@ -20,11 +20,8 @@ import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.LuceneIndexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory;
import org.alfresco.service.cmr.repository.StoreRef;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

View File

@@ -48,11 +48,11 @@ import java.util.zip.CRC32;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.FilterIndexReaderByNodeRefs2;
import org.alfresco.repo.search.impl.lucene.analysis.AlfrescoStandardAnalyser;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.util.GUID;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.alfresco.repo.search.impl.lucene.analysis.AlfrescoStandardAnalyser;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@@ -64,8 +64,8 @@ import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.InputStream;
import org.apache.lucene.store.OutputStream;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
/**
@@ -223,6 +223,14 @@ public class IndexInfo
private boolean mergerUseCompoundFile = true;
private int mergerTargetOverlays = 5;
private long writeLockTimeout = IndexWriter.WRITE_LOCK_TIMEOUT;
private long commitLockTimeout = IndexWriter.COMMIT_LOCK_TIMEOUT;
private int maxFieldLength = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
// TODO: Something to control the maximum number of overlays
@@ -308,9 +316,13 @@ public class IndexInfo
{
writer = new IndexWriter(oldIndex, new AlfrescoStandardAnalyser(), false);
writer.setUseCompoundFile(writerUseCompoundFile);
writer.minMergeDocs = writerMinMergeDocs;
writer.mergeFactor = writerMergeFactor;
writer.maxMergeDocs = writerMaxMergeDocs;
writer.setMaxBufferedDocs(writerMinMergeDocs);
writer.setMergeFactor(writerMergeFactor);
writer.setMaxMergeDocs(writerMaxMergeDocs);
writer.setCommitLockTimeout(commitLockTimeout);
writer.setWriteLockTimeout(writeLockTimeout);
writer.setMaxFieldLength(maxFieldLength);
writer.setTermIndexInterval(termIndexInterval);
writer.optimize();
long docs = writer.docCount();
writer.close();
@@ -444,9 +456,13 @@ public class IndexInfo
{
writer = new IndexWriter(emptyIndex, new AlfrescoStandardAnalyser(), true);
writer.setUseCompoundFile(writerUseCompoundFile);
writer.minMergeDocs = writerMinMergeDocs;
writer.mergeFactor = writerMergeFactor;
writer.maxMergeDocs = writerMaxMergeDocs;
writer.setMaxBufferedDocs(writerMinMergeDocs);
writer.setMergeFactor(writerMergeFactor);
writer.setMaxMergeDocs(writerMaxMergeDocs);
writer.setCommitLockTimeout(commitLockTimeout);
writer.setWriteLockTimeout(writeLockTimeout);
writer.setMaxFieldLength(maxFieldLength);
writer.setTermIndexInterval(termIndexInterval);
}
catch (IOException e)
{
@@ -555,9 +571,13 @@ public class IndexInfo
{
IndexWriter creator = new IndexWriter(location, analyzer, true);
creator.setUseCompoundFile(writerUseCompoundFile);
creator.minMergeDocs = writerMinMergeDocs;
creator.mergeFactor = writerMergeFactor;
creator.maxMergeDocs = writerMaxMergeDocs;
creator.setMaxBufferedDocs(writerMinMergeDocs);
creator.setMergeFactor(writerMergeFactor);
creator.setMaxMergeDocs(writerMaxMergeDocs);
creator.setCommitLockTimeout(commitLockTimeout);
creator.setWriteLockTimeout(writeLockTimeout);
creator.setMaxFieldLength(maxFieldLength);
creator.setTermIndexInterval(termIndexInterval);
return creator;
}
return null;
@@ -582,9 +602,13 @@ public class IndexInfo
{
writer = new IndexWriter(location, analyzer, false);
writer.setUseCompoundFile(writerUseCompoundFile);
writer.minMergeDocs = writerMinMergeDocs;
writer.mergeFactor = writerMergeFactor;
writer.maxMergeDocs = writerMaxMergeDocs;
writer.setMaxBufferedDocs(writerMinMergeDocs);
writer.setMergeFactor(writerMergeFactor);
writer.setMaxMergeDocs(writerMaxMergeDocs);
writer.setCommitLockTimeout(commitLockTimeout);
writer.setWriteLockTimeout(writeLockTimeout);
writer.setMaxFieldLength(maxFieldLength);
writer.setTermIndexInterval(termIndexInterval);
}
indexWriters.put(id, writer);
}
@@ -2173,7 +2197,7 @@ public class IndexInfo
Document doc = hits.doc(i);
if (doc.getField("ISCONTAINER") == null)
{
reader.delete(hits.id(i));
reader.deleteDocument(hits.id(i));
invalidIndexes.add(key);
// There should only be one thing to delete
// break;
@@ -2185,7 +2209,7 @@ public class IndexInfo
}
else
{
if (reader.delete(new Term("ID", nodeRef.toString())) > 0)
if (reader.deleteDocuments(new Term("ID", nodeRef.toString())) > 0)
{
invalidIndexes.add(key);
}
@@ -2440,9 +2464,11 @@ public class IndexInfo
}
writer.setUseCompoundFile(mergerUseCompoundFile);
writer.minMergeDocs = mergerMinMergeDocs;
writer.mergeFactor = mergerMergeFactor;
writer.maxMergeDocs = mergerMaxMergeDocs;
writer.setMaxBufferedDocs(mergerMinMergeDocs);
writer.setMergeFactor(mergerMergeFactor);
writer.setMaxMergeDocs(mergerMaxMergeDocs);
writer.setCommitLockTimeout(commitLockTimeout);
writer.setWriteLockTimeout(writeLockTimeout);
}
}
writer.addIndexes(readers);
@@ -2455,9 +2481,9 @@ public class IndexInfo
for (int i = 0; i < files.length; i++)
{
// make place on ram disk
OutputStream os = directory.createFile(files[i]);
IndexOutput os = directory.createOutput(files[i]);
// read current file
InputStream is = ramDirectory.openFile(files[i]);
IndexInput is = ramDirectory.openInput(files[i]);
// and copy to ram disk
int len = (int) is.length();
byte[] buf = new byte[len];
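A minimal sketch consolidating the Lucene 2.0 IndexWriter configuration pattern the hunks above switch to (setter calls instead of the old public minMergeDocs/mergeFactor/maxMergeDocs fields); the analyzer, path and tuning values here are illustrative, not the IndexInfo defaults.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;

public class WriterConfigExample
{
    public static IndexWriter open(String location) throws java.io.IOException
    {
        // false = open an existing index rather than create a new one
        IndexWriter writer = new IndexWriter(location, new StandardAnalyzer(), false);
        writer.setUseCompoundFile(true);
        writer.setMaxBufferedDocs(10);              // replaces writer.minMergeDocs
        writer.setMergeFactor(10);                  // replaces writer.mergeFactor
        writer.setMaxMergeDocs(Integer.MAX_VALUE);  // replaces writer.maxMergeDocs
        writer.setCommitLockTimeout(IndexWriter.COMMIT_LOCK_TIMEOUT);
        writer.setWriteLockTimeout(IndexWriter.WRITE_LOCK_TIMEOUT);
        writer.setMaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH);
        writer.setTermIndexInterval(IndexWriter.DEFAULT_TERM_INDEX_INTERVAL);
        return writer;
    }
}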

View File

@@ -100,9 +100,9 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", WORD_LIST[i], false, true, false));
doc.add(new Field("TEXT", WORD_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
@@ -198,12 +198,12 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", CREATE_LIST[i], false, true, false));
doc.add(new Field("TEXT", CREATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
nodeRefs.add(nodeRef);
doc.add(new Field("ID", nodeRef.toString(), false, true, false));
doc.add(new Field("ID", nodeRef.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
@@ -388,12 +388,12 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", CREATE_LIST[i], false, true, false));
doc.add(new Field("TEXT", CREATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
nodeRefs.add(nodeRef);
doc.add(new Field("ID", nodeRef.toString(), false, true, false));
doc.add(new Field("ID", nodeRef.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
@@ -474,9 +474,9 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", UPDATE_LIST[i], false, true, false));
doc.add(new Field("TEXT", UPDATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
@@ -654,12 +654,12 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", create[i], false, true, false));
doc.add(new Field("TEXT", create[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
nodeRefs.add(nodeRef);
doc.add(new Field("ID", nodeRef.toString(), false, true, false));
doc.add(new Field("ID", nodeRef.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
@@ -745,9 +745,9 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
Document doc = new Document();
for (int k = 0; k < 15; k++)
{
doc.add(new Field("ID" + k, guid, false, true, false));
doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
}
doc.add(new Field("TEXT", update[i], false, true, false));
doc.add(new Field("TEXT", update[i], Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
writer.addDocument(doc);
ii.closeDeltaIndexWriter(guid);
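A minimal sketch of the Lucene 2.0 Field construction used throughout the hunks above, which replaces the old (store, index, token) boolean constructor with the Store/Index/TermVector constants; the field names and the stored/tokenised choices are illustrative.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

public class FieldExample
{
    public static Document build(String id, String text)
    {
        Document doc = new Document();
        // not stored, indexed without tokenising, no term vectors
        doc.add(new Field("ID", id, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
        // stored and tokenised for full text search
        doc.add(new Field("TEXT", text, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
        return doc;
    }
}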