mirror of https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-07-31 17:39:05 +00:00
Merged V2.2 to HEAD
11016: Query performance improvements
11018: Build fixes after .... Query performance improvements
11043: Updated Lucene config and test fixes
11047: Fixed test's expected results
11049: Build fix
11051: Tighten up on Auditable tests and checks for null

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@11221 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -104,13 +104,26 @@ public class AuditableAspectTest extends BaseSpringTest
                 QName.createQName("{test}testperson"),
                 ContentModel.TYPE_PERSON,
                 personProps);
+        NodeRef nodeRef = childAssocRef.getChildRef();
 
         // Assert the person is not auditable
-        Set<QName> aspects = nodeService.getAspects(childAssocRef.getChildRef());
-        assertFalse(aspects.contains(ContentModel.ASPECT_AUDITABLE));
+        Set<QName> aspects = nodeService.getAspects(nodeRef);
+        assertFalse("cm:auditable must not be present.", aspects.contains(ContentModel.ASPECT_AUDITABLE));
+
+        Map<QName, Serializable> properties = nodeService.getProperties(nodeRef);
+        assertFalse("cm:creator must not be present", properties.containsKey(ContentModel.PROP_CREATOR));
+        assertFalse("cm:created must not be present", properties.containsKey(ContentModel.PROP_CREATED));
+
+        assertNull(
+                "Didn't expect to get single auditable property",
+                nodeService.getProperty(nodeRef, ContentModel.PROP_CREATOR));
 
         System.out.println(NodeStoreInspector.dumpNodeStore(nodeService, storeRef));
     }
 
+    public void test() throws Throwable
+    {
+
+    }
+
 
     public void testAddAudit()
@@ -92,11 +92,12 @@ public class AuditableProperties
         }
         else if (qname.equals(ContentModel.PROP_MODIFIER))
         {
-            return auditModifier;
+            return auditModifier == null ? auditCreator : auditModifier;
         }
         else if (qname.equals(ContentModel.PROP_MODIFIED))
        {
-            return DefaultTypeConverter.INSTANCE.convert(Date.class, auditModified);
+            String dateStr = auditModified == null ? auditCreated : auditModified;
+            return DefaultTypeConverter.INSTANCE.convert(Date.class, dateStr);
         }
         else if (qname.equals(ContentModel.PROP_ACCESSED))
         {
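The hunk above changes the null handling: cm:modifier and cm:modified now fall back to the creation values when a node has never been modified, instead of returning null. A minimal standalone sketch of the fallback rule (field names follow the diff; the sample values are made up):

class AuditFallbackExample
{
    String auditCreator = "admin";
    String auditCreated = "2008-04-01T10:00:00.000Z";
    String auditModifier;   // null until the first modification
    String auditModified;   // null until the first modification

    String getModifier()
    {
        // a never-modified node reports its creator as the modifier
        return auditModifier == null ? auditCreator : auditModifier;
    }

    String getModified()
    {
        // likewise, the creation date stands in for the modification date
        return auditModified == null ? auditCreated : auditModified;
    }
}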
@@ -1020,8 +1020,16 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
         // Handle cm:auditable
         if (AuditableProperties.isAuditableProperty(propertyQName))
         {
-            AuditableProperties auditableProperties = node.getAuditableProperties();
-            return auditableProperties.getAuditableProperty(propertyQName);
+            // Only bother if the aspect is present
+            if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE))
+            {
+                AuditableProperties auditableProperties = node.getAuditableProperties();
+                return auditableProperties.getAuditableProperty(propertyQName);
+            }
+            else
+            {
+                return null;
+            }
         }
 
         QNameEntity propertyQNameEntity = qnameDAO.getQNameEntity(propertyQName);
@@ -1051,8 +1059,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
                 dictionaryService);
 
         // Handle cm:auditable
-        AuditableProperties auditableProperties = node.getAuditableProperties();
-        converted.putAll(auditableProperties.getAuditableProperties());
+        if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE))
+        {
+            AuditableProperties auditableProperties = node.getAuditableProperties();
+            converted.putAll(auditableProperties.getAuditableProperties());
+        }
 
         // Done
         return converted;
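Both hunks apply the same guard: auditable values are only read when the node actually carries the cm:auditable aspect, so nodes without it return null (or contribute nothing to the property map) instead of touching the audit columns. A hedged sketch of the pattern, reusing the names visible in the diff (Node, AuditableProperties and hasNodeAspect are the DAO's internals, not a public API):

    // Sketch only: short-circuit before consulting audit data.
    private Serializable getAuditablePropertyOrNull(Node node, QName propertyQName)
    {
        if (!AuditableProperties.isAuditableProperty(propertyQName))
        {
            return null; // not an auditable property at all
        }
        if (!hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE))
        {
            return null; // aspect absent: nothing to read, nothing to load
        }
        return node.getAuditableProperties().getAuditableProperty(propertyQName);
    }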
@@ -153,7 +153,7 @@ public class SearcherComponentTest extends TestCase
 
         xpath = new NodeServiceXPath("*/*", documentNavigator, null);
         list = xpath.selectNodes(new ChildAssociationRef(null, null, null, rootNodeRef));
-        assertEquals(4, list.size());
+        assertEquals(5, list.size());
 
         xpath = new NodeServiceXPath("*/*/*", documentNavigator, null);
         list = xpath.selectNodes(new ChildAssociationRef(null, null, null, rootNodeRef));
@@ -169,7 +169,7 @@ public class SearcherComponentTest extends TestCase
 
         xpath = new NodeServiceXPath("*//.", documentNavigator, null);
         list = xpath.selectNodes(new ChildAssociationRef(null, null, null, rootNodeRef));
-        assertEquals(12, list.size());
+        assertEquals(13, list.size());
 
         xpathStr = "test:root_p_n1";
         xpath = new NodeServiceXPath(xpathStr, documentNavigator, null);
@@ -268,7 +268,7 @@ public class SearcherComponentTest extends TestCase
         xpath = new NodeServiceXPath(xpathStr, documentNavigator, new QueryParameterDefinition[] { paramDef });
         xpath.addNamespace(BaseNodeServiceTest.TEST_PREFIX, BaseNodeServiceTest.NAMESPACE);
         list = xpath.selectNodes(assocRefs.get(qname));
-        assertEquals(2, list.size()); // 2 distinct paths to node n8, which is of type content
+        assertEquals(3, list.size()); // 2 distinct paths to node n8, which is of type content
 
         xpath = new NodeServiceXPath("/", documentNavigator, null);
         xpath.addNamespace(BaseNodeServiceTest.TEST_PREFIX, BaseNodeServiceTest.NAMESPACE);
@@ -560,7 +560,7 @@ public class SearcherComponentTest extends TestCase
                 "element\\(\\s*(\\*|\\$?\\w*:\\w*)\\s*,\\s*(\\*|\\$?\\w*:\\w*)\\s*\\)", "$1[subtypeOf(\"$2\")]"),
                 documentNavigator, null);
         list = xpath.selectNodes(new ChildAssociationRef(null, null, null, rootNodeRef));
-        assertEquals(12, list.size());
+        assertEquals(13, list.size());
 
         xpath = new NodeServiceXPath("//element(jcr:root, *)".replaceAll(
                 "element\\(\\s*(\\*|\\$?\\w*:\\w*)\\s*,\\s*(\\*|\\$?\\w*:\\w*)\\s*\\)", "$1[subtypeOf(\"$2\")]"),
@@ -577,7 +577,7 @@ public class SearcherComponentTest extends TestCase
                 "element\\(\\s*(\\*|\\$?\\w*:\\w*)\\s*,\\s*(\\*|\\$?\\w*:\\w*)\\s*\\)", "$1[subtypeOf(\"$2\")]"),
                 documentNavigator, new QueryParameterDefinition[] { paramDef });
         list = xpath.selectNodes(new ChildAssociationRef(null, null, null, rootNodeRef));
-        assertEquals(2, list.size());
+        assertEquals(3, list.size());
 
         paramDef = new QueryParameterDefImpl(QName.createQName("test:type", namespacePrefixResolver), dictionaryService
                 .getDataType(DataTypeDefinition.QNAME), true, BaseNodeServiceTest.TYPE_QNAME_TEST_CONTENT
@@ -33,7 +33,9 @@ import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Calendar;
 import java.util.Date;
+import java.util.GregorianCalendar;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Locale;
@@ -109,7 +111,7 @@ public class ADMLuceneTest extends TestCase
 {
 
     private static final String TEST_NAMESPACE = "http://www.alfresco.org/test/lucenetest";
-
+
     private static final QName ASSOC_TYPE_QNAME = QName.createQName(TEST_NAMESPACE, "assoc");
 
     private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
@@ -420,10 +422,12 @@ public class ADMLuceneTest extends TestCase
 
         documentOrder = new NodeRef[] { rootNodeRef, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14, n3, n1, n2 };
 
-        nodeService.addAspect(n3, ContentModel.ASPECT_AUDITABLE, null);
-        nodeService.addAspect(n1, ContentModel.ASPECT_AUDITABLE, null);
+        // TODO: Why was the cm:auditable aspect added here?
+        // By adding it, the auditable properties were set automatically.
+        // nodeService.addAspect(n3, ContentModel.ASPECT_AUDITABLE, null);
+        // nodeService.addAspect(n1, ContentModel.ASPECT_AUDITABLE, null);
         nodeService.setProperty(n1, ContentModel.PROP_MODIFIED, new Date(new Date().getTime() - 1000*60*60));
-        nodeService.addAspect(n2, ContentModel.ASPECT_AUDITABLE, null);
+        // nodeService.addAspect(n2, ContentModel.ASPECT_AUDITABLE, null);
     }
 
     private double orderDoubleCount = -0.11d;
@@ -477,13 +481,17 @@ public class ADMLuceneTest extends TestCase
 
     public void restManyReaders() throws Exception
     {
         QName propQName = QName.createQName(TEST_NAMESPACE, "text-indexed-stored-tokenised-atomic");
+
         NodeRef base = rootNodeRef;
         for (int i = 0; i < 10; i++)
         {
             NodeRef dir = nodeService.createNode(base, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}d-" + i), testSuperType, null).getChildRef();
-            for (int j = 0; j < 10; j++)
+            for (int j = 0; j < 100; j++)
             {
-                NodeRef file = nodeService.createNode(dir, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}meep"), testSuperType, null).getChildRef();
+                Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
+                properties.put(propQName, "lemon");
+                NodeRef file = nodeService.createNode(dir, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}meep"), testSuperType, properties).getChildRef();
             }
         }
         testTX.commit();
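The fixture now builds 10 directories of 100 files each and passes the indexed property into createNode instead of creating the node bare; supplying the map at creation time writes and indexes the node with its final state in one step (setting the property afterwards would presumably trigger a second update). The creation idiom, extracted (nodeService, dir, testSuperType and propQName are the test's own fields):

    Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
    properties.put(propQName, "lemon");
    NodeRef file = nodeService.createNode(dir, ContentModel.ASSOC_CHILDREN,
            QName.createQName("{namespace}meep"), testSuperType, properties).getChildRef();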
@@ -504,10 +512,23 @@ public class ADMLuceneTest extends TestCase
 
         Thread runner = null;
 
-        for (int i = 0; i < 20; i++)
+        // testQuery(searcher, runner, "PATH:\"/d-0/*\"");
+        // testQuery(searcher, runner, "PATH:\"/d-0/meep\"");
+        // testQuery(searcher, runner, "PATH:\"/d-0//*\"");
+        // testQuery(searcher, runner, "PATH:\"/d-0//meep\"");
+        testQuery(searcher, runner, "PATH:\"//*\"");
+        // testQuery(searcher, runner, "PATH:\"//meep\"");
+        // testQuery(searcher, runner, "@"+LuceneQueryParser.escape(propQName.toString())+":\"lemon\"");
+    }
+
+    private void testQuery(ADMLuceneSearcherImpl searcher, Thread runner, String query)
+    {
+        for (int i = 0; i < 1; i++)
         {
-            runner = new QueryThread("Concurrent-" + i, runner, searcher);
+            runner = new QueryThread("Concurrent-" + i, runner, searcher, query);
         }
         long start = System.nanoTime();
         if (runner != null)
         {
             runner.start();
@@ -521,6 +542,8 @@ public class ADMLuceneTest extends TestCase
                 e.printStackTrace();
             }
         }
+        long end = System.nanoTime();
+        System.out.println(query + "\t" + ((end - start) / 1e9f));
     }
 
     class QueryThread extends Thread
@@ -529,12 +552,15 @@ public class ADMLuceneTest extends TestCase
 
         ADMLuceneSearcherImpl searcher;
 
-        QueryThread(String name, Thread waiter, ADMLuceneSearcherImpl searcher)
+        String query;
+
+        QueryThread(String name, Thread waiter, ADMLuceneSearcherImpl searcher, String query)
         {
             super(name);
             this.setDaemon(true);
             this.waiter = waiter;
             this.searcher = searcher;
+            this.query = query;
         }
 
         public void run()
@@ -546,7 +572,7 @@ public class ADMLuceneTest extends TestCase
             }
             try
             {
-                System.out.println("Start " + this.getName());
+                // System.out.println("Start " + this.getName());
 
                 RetryingTransactionCallback<Object> createAndDeleteCallback = new RetryingTransactionCallback<Object>()
                 {
@@ -555,9 +581,9 @@ public class ADMLuceneTest extends TestCase
                         SessionSizeResourceManager.setDisableInTransaction();
                         for (int i = 0; i < 100; i++)
                        {
-                            ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "PATH:\"//meep\"");
+                            ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene", query);
                             int count = results.length();
-                            for(ResultSetRow row : results)
+                            for (ResultSetRow row : results)
                             {
                                 NodeRef nr = row.getNodeRef();
                             }
@@ -568,7 +594,7 @@ public class ADMLuceneTest extends TestCase
                 };
                 retryingTransactionHelper.doInTransaction(createAndDeleteCallback);
 
-                System.out.println("End " + this.getName());
+                // System.out.println("End " + this.getName());
             }
             catch (Exception e)
            {
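QueryThread now carries the query string it should run, so the new testQuery helper can time different PATH and property queries through the same machinery: it builds a (currently length-1) chain of worker threads, starts it, and prints wall-clock seconds. The timing shape, reduced to its essentials (the waiter/join coordination lives in the elided parts of the test, so it is only sketched here):

    // Hedged sketch of the harness added above.
    static void timeQuery(Thread runner, String query) throws InterruptedException
    {
        long start = System.nanoTime();
        runner.start();
        runner.join();   // the test's waiter-chain coordination is elided
        long end = System.nanoTime();
        System.out.println(query + "\t" + ((end - start) / 1e9f));
    }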
@@ -2002,7 +2028,16 @@ public class ADMLuceneTest extends TestCase
             for (ResultSetRow row : results)
             {
                 Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row.getNodeRef(), ContentModel.PROP_MODIFIED));
-                //System.out.println("A " + currentBun + " "+row.getQName());
+                if (currentBun != null)
+                {
+                    Calendar c = new GregorianCalendar();
+                    c.setTime(currentBun);
+                    c.set(Calendar.MILLISECOND, 0);
+                    c.set(Calendar.SECOND, 0);
+                    c.set(Calendar.MINUTE, 0);
+                    c.set(Calendar.HOUR_OF_DAY, 0);
+                    currentBun = c.getTime();
+                }
                 if (date != null)
                 {
                     assertTrue(date.compareTo(currentBun) <= 0);
@@ -2023,6 +2058,17 @@ public class ADMLuceneTest extends TestCase
             {
                 Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row.getNodeRef(), ContentModel.PROP_MODIFIED));
                 // System.out.println(currentBun);
+                if (currentBun != null)
+                {
+                    Calendar c = new GregorianCalendar();
+                    c.setTime(currentBun);
+                    c.set(Calendar.MILLISECOND, 0);
+                    c.set(Calendar.SECOND, 0);
+                    c.set(Calendar.MINUTE, 0);
+                    c.set(Calendar.HOUR_OF_DAY, 0);
+                    currentBun = c.getTime();
+                }
+
                 if ((date != null) && (currentBun != null))
                 {
                     assertTrue(date.compareTo(currentBun) >= 0);
@@ -3099,20 +3145,17 @@ public class ADMLuceneTest extends TestCase
         results.close();
 
         // short and long field ranges
 
         sDate = df.format(date);
-        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@cm\\:created:[MIN TO " + sDate + "]",
-                null, null);
-        assertEquals(4, results.length());
+        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@cm\\:created:[MIN TO " + sDate + "]", null, null);
+        assertEquals(1, results.length());
         results.close();
 
         sDate = df.format(date);
-        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@" + escapeQName(ContentModel.PROP_CREATED) + ":[MIN TO " + sDate + "]",
-                null, null);
-        assertEquals(4, results.length());
+        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@" + escapeQName(ContentModel.PROP_CREATED) + ":[MIN TO " + sDate + "]", null, null);
+        assertEquals(1, results.length());
         results.close();
 
         // Date ranges
         // Test date collapses but date time does not
@@ -46,7 +46,6 @@ import javax.transaction.xa.XAResource;
 import javax.transaction.xa.Xid;
 
 import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.repo.avm.AVMNodeService;
 import org.alfresco.repo.domain.hibernate.BulkLoader;
 import org.alfresco.repo.search.IndexerException;
 import org.alfresco.repo.search.MLAnalysisMode;
@@ -64,6 +63,7 @@ import org.alfresco.service.transaction.TransactionService;
 import org.alfresco.util.GUID;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.store.Lock;
 import org.quartz.Job;
@@ -126,7 +126,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
     /** the maximum transformation time to allow atomically, defaulting to 20ms */
     private long maxAtomicTransformationTime = 20;
 
-    private int indexerMaxFieldLength;
+    private int indexerMaxFieldLength = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
 
     private long writeLockTimeout;
 
@@ -142,6 +142,44 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
 
     private BulkLoader bulkLoader;
 
+    private int maxDocIdCacheSize = 10000;
+
+    private int maxDocsForInMemoryMerge = 10000;
+
+    private int maxDocumentCacheSize = 100;
+
+    private int maxIsCategoryCacheSize = -1;
+
+    private int maxLinkAspectCacheSize = 10000;
+
+    private int maxParentCacheSize = 10000;
+
+    private int maxPathCacheSize = 10000;
+
+    private int maxTypeCacheSize = 10000;
+
+    private int mergerMaxMergeDocs = 1000000;
+
+    private int mergerMergeFactor = 5;
+
+    private int mergerMinMergeDocs = 1000;
+
+    private int mergerTargetIndexCount = 5;
+
+    private int mergerTargetOverlayCount = 5;
+
+    private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
+
+    private boolean useNioMemoryMapping = true;
+
+    private int writerMaxMergeDocs = 1000000;
+
+    private int writerMergeFactor = 5;
+
+    private int writerMinMergeDocs = 1000;
+
+    private boolean cacheEnabled = true;
+
     /**
      * Private constructor for the singleton TODO: FIt in with IOC
     */
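All of the new knobs default to Lucene-style values and are exposed as plain bean properties (getters and setters below), so they can be tuned from Spring configuration or directly in code. A hedged example of programmatic tuning; the factory instance would come from the application context, and the values shown are just the defaults introduced above:

    void tune(AbstractLuceneIndexerAndSearcherFactory factory)
    {
        factory.setUseNioMemoryMapping(true);
        factory.setMaxDocsForInMemoryMerge(10000); // fragments at or below this size are handled in RAM
        factory.setWriterMinMergeDocs(1000);
        factory.setWriterMergeFactor(5);
        factory.setWriterMaxMergeDocs(1000000);
        factory.setMergerTargetIndexCount(5);
        factory.setMaxPathCacheSize(10000);
        factory.setCacheEnabled(true);
    }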
@@ -1462,6 +1500,197 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
         defaultMLSearchAnalysisMode = mode;
     }
 
+    public int getMaxDocIdCacheSize()
+    {
+        return maxDocIdCacheSize;
+    }
+
+    public void setMaxDocIdCacheSize(int maxDocIdCacheSize)
+    {
+        this.maxDocIdCacheSize = maxDocIdCacheSize;
+    }
+
+    public int getMaxDocsForInMemoryMerge()
+    {
+        return maxDocsForInMemoryMerge;
+    }
+
+    public void setMaxDocsForInMemoryMerge(int maxDocsForInMemoryMerge)
+    {
+        this.maxDocsForInMemoryMerge = maxDocsForInMemoryMerge;
+    }
+
+    public int getMaxDocumentCacheSize()
+    {
+        return maxDocumentCacheSize;
+    }
+
+    public void setMaxDocumentCacheSize(int maxDocumentCacheSize)
+    {
+        this.maxDocumentCacheSize = maxDocumentCacheSize;
+    }
+
+    public int getMaxIsCategoryCacheSize()
+    {
+        return maxIsCategoryCacheSize;
+    }
+
+    public void setMaxIsCategoryCacheSize(int maxIsCategoryCacheSize)
+    {
+        this.maxIsCategoryCacheSize = maxIsCategoryCacheSize;
+    }
+
+    public int getMaxLinkAspectCacheSize()
+    {
+        return maxLinkAspectCacheSize;
+    }
+
+    public void setMaxLinkAspectCacheSize(int maxLinkAspectCacheSize)
+    {
+        this.maxLinkAspectCacheSize = maxLinkAspectCacheSize;
+    }
+
+    public int getMaxParentCacheSize()
+    {
+        return maxParentCacheSize;
+    }
+
+    public void setMaxParentCacheSize(int maxParentCacheSize)
+    {
+        this.maxParentCacheSize = maxParentCacheSize;
+    }
+
+    public int getMaxPathCacheSize()
+    {
+        return maxPathCacheSize;
+    }
+
+    public void setMaxPathCacheSize(int maxPathCacheSize)
+    {
+        this.maxPathCacheSize = maxPathCacheSize;
+    }
+
+    public int getMaxTypeCacheSize()
+    {
+        return maxTypeCacheSize;
+    }
+
+    public void setMaxTypeCacheSize(int maxTypeCacheSize)
+    {
+        this.maxTypeCacheSize = maxTypeCacheSize;
+    }
+
+    public int getMergerMaxMergeDocs()
+    {
+        return mergerMaxMergeDocs;
+    }
+
+    public void setMergerMaxMergeDocs(int mergerMaxMergeDocs)
+    {
+        this.mergerMaxMergeDocs = mergerMaxMergeDocs;
+    }
+
+    public int getMergerMergeFactor()
+    {
+        return mergerMergeFactor;
+    }
+
+    public void setMergerMergeFactor(int mergerMergeFactor)
+    {
+        this.mergerMergeFactor = mergerMergeFactor;
+    }
+
+    public int getMergerMinMergeDocs()
+    {
+        return mergerMinMergeDocs;
+    }
+
+    public void setMergerMinMergeDocs(int mergerMinMergeDocs)
+    {
+        this.mergerMinMergeDocs = mergerMinMergeDocs;
+    }
+
+    public int getMergerTargetIndexCount()
+    {
+        return mergerTargetIndexCount;
+    }
+
+    public void setMergerTargetIndexCount(int mergerTargetIndexCount)
+    {
+        this.mergerTargetIndexCount = mergerTargetIndexCount;
+    }
+
+    public int getMergerTargetOverlayCount()
+    {
+        return mergerTargetOverlayCount;
+    }
+
+    public void setMergerTargetOverlayCount(int mergerTargetOverlayCount)
+    {
+        this.mergerTargetOverlayCount = mergerTargetOverlayCount;
+    }
+
+    public int getTermIndexInterval()
+    {
+        return termIndexInterval;
+    }
+
+    public void setTermIndexInterval(int termIndexInterval)
+    {
+        this.termIndexInterval = termIndexInterval;
+    }
+
+    public boolean getUseNioMemoryMapping()
+    {
+        return useNioMemoryMapping;
+    }
+
+    public void setUseNioMemoryMapping(boolean useNioMemoryMapping)
+    {
+        this.useNioMemoryMapping = useNioMemoryMapping;
+    }
+
+    public int getWriterMaxMergeDocs()
+    {
+        return writerMaxMergeDocs;
+    }
+
+    public void setWriterMaxMergeDocs(int writerMaxMergeDocs)
+    {
+        this.writerMaxMergeDocs = writerMaxMergeDocs;
+    }
+
+    public int getWriterMergeFactor()
+    {
+        return writerMergeFactor;
+    }
+
+    public void setWriterMergeFactor(int writerMergeFactor)
+    {
+        this.writerMergeFactor = writerMergeFactor;
+    }
+
+    public int getWriterMinMergeDocs()
+    {
+        return writerMinMergeDocs;
+    }
+
+    public void setWriterMinMergeDocs(int writerMinMergeDocs)
+    {
+        this.writerMinMergeDocs = writerMinMergeDocs;
+    }
+
+    public boolean isCacheEnabled()
+    {
+        return cacheEnabled;
+    }
+
+    public void setCacheEnabled(boolean cacheEnabled)
+    {
+        this.cacheEnabled = cacheEnabled;
+    }
+
     protected abstract List<StoreRef> getAllStores();
 
     public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
@@ -94,4 +94,118 @@ public interface LuceneConfig
      */
     public BulkLoader getBulkLoader();
 
+    /**
+     * Use the nio memory mapping (work arounf for bugs with some JVMs)
+     * @return
+     */
+    public boolean getUseNioMemoryMapping();
+
+    /**
+     * Max doc number that will merged in memory (and not on disk)
+     *
+     * @return
+     */
+    public int getMaxDocsForInMemoryMerge();
+
+    /**
+     * Lucene writer config
+     * @return
+     */
+    public int getWriterMinMergeDocs();
+
+    /**
+     * Lucene writer config
+     * @return
+     */
+    public int getWriterMergeFactor();
+
+    /**
+     * Lucene writer config
+     * @return
+     */
+    public int getWriterMaxMergeDocs();
+
+    /**
+     * Lucene merger config
+     * @return
+     */
+    public int getMergerMinMergeDocs();
+
+    /**
+     * Lucene merger config
+     * @return
+     */
+    public int getMergerMergeFactor();
+
+    /**
+     * Lucene merger config
+     * @return
+     */
+    public int getMergerMaxMergeDocs();
+
+    /**
+     * Target overlays (will apply deletions and create indexes if over this limit)
+     * @return
+     */
+    public int getMergerTargetOverlayCount();
+
+    /**
+     * Target index count. Over this indexes will be merged together.
+     * @return
+     */
+    public int getMergerTargetIndexCount();
+
+    /**
+     * Lucene term index interval
+     * @return
+     */
+    public int getTermIndexInterval();
+
+    /**
+     * Is caching enabled for each index fragment?
+     * @return
+     */
+    public boolean isCacheEnabled();
+
+    /**
+     * How many categories to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxIsCategoryCacheSize();
+
+    /**
+     * How many documents to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxDocumentCacheSize();
+
+    /**
+     * How many document ids to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxDocIdCacheSize();
+
+    /**
+     * How many paths to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxPathCacheSize();
+
+    /**
+     * How many types to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxTypeCacheSize();
+
+    /**
+     * How many parents to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxParentCacheSize();
+
+    /**
+     * How many link aspects to cache (-ve => unbounded)
+     * @return
+     */
+    public int getMaxLinkAspectCacheSize();
 }
@@ -35,6 +35,8 @@ public interface CachingIndexReader
 {
     public String getId(int n) throws IOException;
 
+    public String getPathLinkId(int n) throws IOException;
+
     public String[] getIds(int n) throws IOException;
 
     public String getIsCategory(int n) throws IOException;
 
@@ -132,7 +132,7 @@ public class IndexInfo
     /**
     * Use NIO memory mapping to wite the index control file.
     */
-    private static final boolean useNIOMemoryMapping = true;
+    private static boolean useNIOMemoryMapping = true;
 
     /**
      * The default name for the file that holds the index information
@@ -305,20 +305,14 @@ public class IndexInfo
 
     private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
 
-    /**
-     * Control if the cleaner thread is active
-     */
     private boolean enableCleaner = true;
 
-    /**
-     * Control if the merger thread is active
-     */
     private boolean enableMerger = true;
 
     private ThreadPoolExecutor threadPoolExecutor;
 
     private LuceneConfig config;
 
     static
     {
         // We do not require any of the lucene in-built locking.
@@ -371,11 +365,22 @@ public class IndexInfo
     {
         super();
         initialiseTransitions();
 
+        this.config = config;
+
         if (config != null)
         {
             this.maxFieldLength = config.getIndexerMaxFieldLength();
             this.threadPoolExecutor = config.getThreadPoolExecutor();
+            IndexInfo.useNIOMemoryMapping = config.getUseNioMemoryMapping();
+            this.maxDocsForInMemoryMerge = config.getMaxDocsForInMemoryMerge();
+            this.writerMinMergeDocs = config.getWriterMinMergeDocs();
+            this.writerMergeFactor = config.getWriterMergeFactor();
+            this.writerMaxMergeDocs = config.getWriterMaxMergeDocs();
+            this.mergerMinMergeDocs = config.getMergerMinMergeDocs();
+            this.mergerMergeFactor = config.getMergerMergeFactor();
+            this.mergerMaxMergeDocs = config.getMergerMaxMergeDocs();
+            this.termIndexInterval = config.getTermIndexInterval();
+            this.mergerTargetOverlays = config.getMergerTargetOverlayCount();
         }
         else
         {
@@ -474,7 +479,7 @@ public class IndexInfo
                     writeStatus();
 
                     // The index exists and we should initialise the single reader
-                    registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName()));
+                    registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
                 }
                 catch (IOException e)
                 {
@@ -548,7 +553,7 @@ public class IndexInfo
                                     s_logger.info("Resetting merge to committed " + entry);
                                 }
                                 entry.setStatus(TransactionStatus.COMMITTED);
-                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName()));
+                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
                                 break;
                             // Complete committing (which is post database
                             // commit)
@@ -559,12 +564,12 @@ public class IndexInfo
                                     s_logger.info("Committing " + entry);
                                 }
                                 entry.setStatus(TransactionStatus.COMMITTED);
-                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName()));
+                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
                                 mainIndexReader = null;
                                 break;
                             // States that require no action
                             case COMMITTED:
-                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName()));
+                                registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
                                 break;
                             default:
                                 // nothing to do
@@ -650,6 +655,10 @@ public class IndexInfo
             String id = file.getName();
             if (!indexEntries.containsKey(id) && isGUID(id))
             {
+                if (s_logger.isDebugEnabled())
+                {
+                    s_logger.debug("Deleting unused index directory " + id);
+                }
                 deleteQueue.add(id);
             }
         }
@@ -1048,7 +1057,7 @@ public class IndexInfo
             }
             return mainIndexReader;
         }
-        catch(RuntimeException e)
+        catch (RuntimeException e)
         {
             e.printStackTrace();
             throw e;
@@ -1147,7 +1156,7 @@ public class IndexInfo
             {
                 reader = new MultiReader(new IndexReader[] { new FilterIndexReaderByStringId("main+id", mainIndexReader, deletions, deleteOnlyNodes), deltaReader });
             }
-            reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(MAIN_READER + id, reader, false);
+            reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(MAIN_READER + id, reader, false, config);
             ReferenceCounting refCounting = (ReferenceCounting) reader;
             refCounting.incrementReferenceCount();
             refCounting.setInvalidForReuse();
@@ -1427,7 +1436,8 @@ public class IndexInfo
             // Make sure we have set up the reader for the data
             // ... and close it so we do not up the ref count
             closeDelta(id);
-            tl.set(buildReferenceCountingIndexReader(id));
+            IndexEntry entry = indexEntries.get(id);
+            tl.set(buildReferenceCountingIndexReader(id, entry.getDocumentCount()));
         }
 
         /**
@@ -1538,7 +1548,8 @@ public class IndexInfo
         public void beforeWithReadLock(String id, Set<Term> toDelete, Set<Term> read) throws IOException
         {
             closeDelta(id);
-            tl.set(buildReferenceCountingIndexReader(id));
+            IndexEntry entry = indexEntries.get(id);
+            tl.set(buildReferenceCountingIndexReader(id, entry.getDocumentCount()));
         }
 
         public void transition(String id, Set<Term> toDelete, Set<Term> read) throws IOException
@@ -1825,7 +1836,7 @@ public class IndexInfo
         {
             reader = IndexReader.open(emptyIndex);
         }
-        reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(MAIN_READER, reader, false);
+        reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(MAIN_READER, reader, false, config);
         return reader;
     }
 
@@ -1851,19 +1862,27 @@ public class IndexInfo
         referenceCountingReadOnlyIndexReaders.put(id, reader);
     }
 
-    private IndexReader buildReferenceCountingIndexReader(String id) throws IOException
+    private IndexReader buildReferenceCountingIndexReader(String id, long size) throws IOException
     {
         IndexReader reader;
         File location = new File(indexDirectory, id).getCanonicalFile();
         if (IndexReader.indexExists(location))
         {
-            reader = IndexReader.open(location);
+            if (size > config.getMaxDocsForInMemoryMerge())
+            {
+                reader = IndexReader.open(location);
+            }
+            else
+            {
+                RAMDirectory rd = new RAMDirectory(location);
+                reader = IndexReader.open(rd);
+            }
         }
         else
        {
             reader = IndexReader.open(emptyIndex);
         }
-        reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(id, reader, true);
+        reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(id, reader, true, config);
         return reader;
     }
 
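buildReferenceCountingIndexReader now receives the fragment's document count and uses it to decide where the reader lives: fragments larger than maxDocsForInMemoryMerge stay on disk, smaller ones are copied into a RAMDirectory so subsequent reads and merges avoid disk seeks. The decision, isolated into a hedged sketch (IndexReader.open and RAMDirectory(File) are the same Lucene calls the diff uses; the surrounding IndexInfo plumbing is omitted):

    import java.io.File;
    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.store.RAMDirectory;

    class SizeBasedReaderOpener
    {
        IndexReader open(File location, long size, long maxDocsForInMemoryMerge) throws IOException
        {
            if (size > maxDocsForInMemoryMerge)
            {
                return IndexReader.open(location);        // large fragment: read from disk
            }
            RAMDirectory rd = new RAMDirectory(location);  // small fragment: load once into memory
            return IndexReader.open(rd);
        }
    }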
@@ -2581,7 +2600,7 @@ public class IndexInfo
                     throw new IllegalStateException();
             }
         }
-
+
     private synchronized void rescheduleRecovery()
     {
         switch (scheduledState)
@@ -2869,7 +2888,7 @@ public class IndexInfo
                 // Check we have a reader registered
                 if (referenceCountingReadOnlyIndexReaders.get(entry.getName()) == null)
                 {
-                    registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName()));
+                    registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
                 }
             }
 
@@ -3126,7 +3145,7 @@ public class IndexInfo
 
                 for (String id : invalidIndexes)
                 {
-                    IndexReader reader = buildReferenceCountingIndexReader(id);
+                    IndexReader reader = buildReferenceCountingIndexReader(id, newIndexCounts.get(id));
                     newReaders.put(id, reader);
                 }
 
@@ -3325,13 +3344,15 @@ public class IndexInfo
 
             String mergeTargetId = null;
 
+            long docCount = 0;
+
             if (toMerge.size() > 0)
             {
                 int count = 0;
                 IndexReader[] readers = new IndexReader[toMerge.size() - 1];
                 RAMDirectory ramDirectory = null;
                 IndexWriter writer = null;
-                long docCount = 0;
 
                 File outputLocation = null;
                 for (IndexEntry entry : toMerge.values())
                 {
@@ -3409,7 +3430,7 @@ public class IndexInfo
                 getReadLock();
                 try
                {
-                    newReader = buildReferenceCountingIndexReader(mergeTargetId);
+                    newReader = buildReferenceCountingIndexReader(mergeTargetId, docCount);
                 }
                 finally
                 {
@@ -3598,136 +3619,6 @@ public class IndexInfo
         return builder.toString();
     }
 
-    public boolean isEnableCleanerThread()
-    {
-        return enableCleaner;
-    }
-
-    public void setEnableCleanerThread(boolean enableCleaner)
-    {
-        this.enableCleaner = enableCleaner;
-    }
-
-    public boolean isEnableMerger()
-    {
-        return enableMerger;
-    }
-
-    public void setEnableMerger(boolean enableMerger)
-    {
-        this.enableMerger = enableMerger;
-    }
-
-    public boolean isIndexIsShared()
-    {
-        return indexIsShared;
-    }
-
-    public void setIndexIsShared(boolean indexIsShared)
-    {
-        this.indexIsShared = indexIsShared;
-    }
-
-    public int getMaxDocsForInMemoryMerge()
-    {
-        return maxDocsForInMemoryMerge;
-    }
-
-    public void setMaxDocsForInMemoryMerge(int maxDocsForInMemoryMerge)
-    {
-        this.maxDocsForInMemoryMerge = maxDocsForInMemoryMerge;
-    }
-
-    public int getMergerMaxMergeDocs()
-    {
-        return mergerMaxMergeDocs;
-    }
-
-    public void setMergerMaxMergeDocs(int mergerMaxMergeDocs)
-    {
-        this.mergerMaxMergeDocs = mergerMaxMergeDocs;
-    }
-
-    public int getMergerMergeFactor()
-    {
-        return mergerMergeFactor;
-    }
-
-    public void setMergerMergeFactor(int mergerMergeFactor)
-    {
-        this.mergerMergeFactor = mergerMergeFactor;
-    }
-
-    public int getMergerMinMergeDocs()
-    {
-        return mergerMinMergeDocs;
-    }
-
-    public void setMergerMinMergeDocs(int mergerMinMergeDocs)
-    {
-        this.mergerMinMergeDocs = mergerMinMergeDocs;
-    }
-
-    public int getMergerTargetOverlays()
-    {
-        return mergerTargetOverlays;
-    }
-
-    public void setMergerTargetOverlays(int mergerTargetOverlays)
-    {
-        this.mergerTargetOverlays = mergerTargetOverlays;
-    }
-
-    public boolean isMergerUseCompoundFile()
-    {
-        return mergerUseCompoundFile;
-    }
-
-    public void setMergerUseCompoundFile(boolean mergerUseCompoundFile)
-    {
-        this.mergerUseCompoundFile = mergerUseCompoundFile;
-    }
-
-    public int getWriterMaxMergeDocs()
-    {
-        return writerMaxMergeDocs;
-    }
-
-    public void setWriterMaxMergeDocs(int writerMaxMergeDocs)
-    {
-        this.writerMaxMergeDocs = writerMaxMergeDocs;
-    }
-
-    public int getWriterMergeFactor()
-    {
-        return writerMergeFactor;
-    }
-
-    public void setWriterMergeFactor(int writerMergeFactor)
-    {
-        this.writerMergeFactor = writerMergeFactor;
-    }
-
-    public int getWriterMinMergeDocs()
-    {
-        return writerMinMergeDocs;
-    }
-
-    public void setWriterMinMergeDocs(int writerMinMergeDocs)
-    {
-        this.writerMinMergeDocs = writerMinMergeDocs;
-    }
-
-    public boolean isWriterUseCompoundFile()
-    {
-        return writerUseCompoundFile;
-    }
-
-    public void setWriterUseCompoundFile(boolean writerUseCompoundFile)
-    {
-        this.writerUseCompoundFile = writerUseCompoundFile;
-    }
-
     private boolean isGUID(String guid)
     {
         try
File diff suppressed because it is too large.
@@ -28,6 +28,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.BitSet;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -35,12 +36,15 @@ import java.util.List;
 
 import org.alfresco.model.ContentModel;
 import org.alfresco.repo.search.SearcherException;
 import org.alfresco.repo.search.impl.lucene.index.CachingIndexReader;
+import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
 import org.alfresco.service.cmr.dictionary.AspectDefinition;
 import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
 import org.alfresco.service.cmr.dictionary.DictionaryService;
 import org.alfresco.service.cmr.dictionary.PropertyDefinition;
 import org.alfresco.service.cmr.dictionary.TypeDefinition;
 import org.alfresco.service.namespace.QName;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
@@ -58,6 +62,11 @@ import org.apache.lucene.search.Weight;
  */
 public class LeafScorer extends Scorer
 {
+    /**
+     * The logger.
+     */
+    private static Log s_logger = LogFactory.getLog(IndexInfo.class);
+
     static class Counter
     {
         int count = 0;
@@ -116,7 +125,7 @@ public class LeafScorer extends Scorer
 
     private int[] cats;
 
-    private TermPositions tp;
+    // private TermPositions tp;
 
     /**
      * Constructor - should use an arg object ...
@@ -143,7 +152,7 @@ public class LeafScorer extends Scorer
         this.containerScorer = containerScorer;
         this.sfps = sfps;
         this.allNodes = allNodes;
-        this.tp = tp;
+        // this.tp = tp;
         if (selfIds == null)
         {
            this.selfIds = new HashMap<String, Counter>();
@@ -169,12 +178,12 @@ public class LeafScorer extends Scorer
 
     }
 
-    private String getId(IndexReader reader, int n) throws IOException
+    private String getPathLinkId(IndexReader reader, int n) throws IOException
     {
         if (reader instanceof CachingIndexReader)
         {
             CachingIndexReader cachingIndexReader = (CachingIndexReader) reader;
-            return cachingIndexReader.getId(n);
+            return cachingIndexReader.getPathLinkId(n);
         }
         else
         {
@@ -221,7 +230,7 @@ public class LeafScorer extends Scorer
             return (path == null) ? null : path.stringValue();
         }
     }
-
+
     private String getType(IndexReader reader, int n) throws IOException
     {
         if (reader instanceof CachingIndexReader)
@@ -236,7 +245,7 @@ public class LeafScorer extends Scorer
             return (path == null) ? null : path.stringValue();
         }
     }
-
+
     private String[] getParents(IndexReader reader, int n) throws IOException
     {
         if (reader instanceof CachingIndexReader)
@@ -264,7 +273,7 @@ public class LeafScorer extends Scorer
             }
         }
     }
-
+
     private String[] getlinkAspects(IndexReader reader, int n) throws IOException
     {
         if (reader instanceof CachingIndexReader)
@@ -292,8 +301,6 @@ public class LeafScorer extends Scorer
             }
         }
     }
-
-
 
     private void initialise() throws IOException
     {
@@ -304,7 +311,7 @@ public class LeafScorer extends Scorer
         {
             int doc = containerScorer.doc();
 
-            String id = getId(reader, doc);
+            String id = getPathLinkId(reader, doc);
             Counter counter = parentIds.get(id);
             if (counter == null)
             {
@@ -348,7 +355,7 @@ public class LeafScorer extends Scorer
             while (level0.next())
             {
                 int doc = level0.doc();
-                String id = getId(reader, doc);
+                String id = getPathLinkId(reader, doc);
                 if (id != null)
                 {
                     Counter counter = parentIds.get(id);
@@ -374,7 +381,10 @@ public class LeafScorer extends Scorer
             }
             else if (parentIds.size() == 0)
             {
-                throw new SearcherException("Index has no root node. Check that the correct index locations are being used.");
+                if (s_logger.isWarnEnabled())
+                {
+                    s_logger.warn("Index has no root node. Check that the correct index locations are being used.");
+                }
             }
         }
 
@@ -382,10 +392,14 @@ public class LeafScorer extends Scorer
     {
         int position = 0;
         parents = new int[10000];
-        for (String parent : parentIds.keySet())
+        ArrayList<String> ordered = new ArrayList<String>(parentIds.size());
+        ordered.addAll(parentIds.keySet());
+        Collections.sort(ordered);
+        for (String parent : ordered)
         {
             Counter counter = parentIds.get(parent);
-            tp.seek(new Term("PARENT", parent));
+            // tp.seek(new Term("PARENT", parent));
+            TermPositions tp = reader.termPositions(new Term("PARENT", parent));
             while (tp.next())
             {
                 for (int i = 0, l = tp.freq(); i < l; i++)
@@ -412,9 +426,13 @@ public class LeafScorer extends Scorer
 
         position = 0;
         self = new int[10000];
-        for (String id : selfIds.keySet())
+        ordered = new ArrayList<String>(selfIds.size());
+        ordered.addAll(selfIds.keySet());
+        Collections.sort(ordered);
+        for (String id : ordered)
         {
-            tp.seek(new Term("ID", id));
+            // tp.seek(new Term("ID", id));
+            TermPositions tp = reader.termPositions(new Term("ID", id));
             while (tp.next())
             {
                 Counter counter = selfIds.get(id);
@@ -438,7 +456,10 @@ public class LeafScorer extends Scorer
 
         position = 0;
         cats = new int[10000];
-        for (String catid : categories.keySet())
+        ordered = new ArrayList<String>(categories.size());
+        ordered.addAll(categories.keySet());
+        Collections.sort(ordered);
+        for (String catid : ordered)
         {
             for (QName apsectQName : dictionaryService.getAllAspects())
             {
@@ -449,7 +470,8 @@ public class LeafScorer extends Scorer
                 {
                     if (propDef.getDataType().getName().equals(DataTypeDefinition.CATEGORY))
                     {
-                        tp.seek(new Term("@" + propDef.getName().toString(), catid));
+                        // tp.seek(new Term("@" + propDef.getName().toString(), catid));
+                        TermPositions tp = reader.termPositions(new Term("@" + propDef.getName().toString(), catid));
                         while (tp.next())
                         {
                             for (int i = 0, l = tp.freq(); i < l; i++)
@@ -819,9 +841,14 @@ public class LeafScorer extends Scorer
             }
         }
 
-        //Document doc = reader.document(doc());
+        // Document doc = reader.document(doc());
         String[] parentFields = getParents(reader, doc());
-        String[] linkFields = getlinkAspects(reader, doc());
+
+        String[] linkFields = null;
+        if (categories.size() > 0)
+        {
+            linkFields = getlinkAspects(reader, doc());
+        }
 
         String parentID = null;
         String linkAspect = null;
@@ -846,7 +873,7 @@ public class LeafScorer extends Scorer
         {
             return;
         }
-        String id = getId(reader, doc());
+        String id = getPathLinkId(reader, doc());
         StructuredFieldPosition last = sfps[sfps.length - 1];
         if ((last.linkSelf() && selfIds.containsKey(id)))
         {
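The LeafScorer changes replace the single shared TermPositions with a fresh enumeration per term, fetch link aspects only when categories are actually in play, and sort the parent/self/category ids before seeking. Seeking terms in sorted order lets Lucene walk its term dictionary mostly sequentially instead of randomly, which is where much of the query-performance gain comes from. The idiom, extracted into a hedged sketch (reader is an org.apache.lucene.index.IndexReader and parentIds a Map keyed by id, as in the diff; the close() call is added here as good practice, the diff itself does not show one):

    List<String> ordered = new ArrayList<String>(parentIds.keySet());
    Collections.sort(ordered);
    for (String parent : ordered)
    {
        TermPositions tp = reader.termPositions(new Term("PARENT", parent));
        while (tp.next())
        {
            for (int i = 0, l = tp.freq(); i < l; i++)
            {
                // record tp.doc() / tp.nextPosition() as the scorer does
            }
        }
        tp.close();
    }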