mirror of https://github.com/Alfresco/alfresco-community-repo.git, synced 2025-07-24 17:32:48 +00:00
Big honkin' merge from head. Sheesh!
git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/WCM-DEV2/root@3617 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -21,6 +21,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.alfresco.repo.search.impl.lucene.analysis.PathAnalyser;
+import org.alfresco.repo.search.impl.lucene.analysis.VerbatimAnalyser;
 import org.alfresco.service.cmr.dictionary.DictionaryService;
 import org.alfresco.service.cmr.dictionary.PropertyDefinition;
 import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
@@ -117,8 +118,23 @@ public class LuceneAnalyser extends Analyzer
         {
             QName propertyQName = QName.createQName(fieldName.substring(1));
             PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
-            DataTypeDefinition dataType = (propertyDef == null) ? dictionaryService.getDataType(DataTypeDefinition.TEXT) : propertyDef.getDataType();
-            analyser = loadAnalyzer(dataType);
+            if (propertyDef != null)
+            {
+                if (propertyDef.isTokenisedInIndex())
+                {
+                    DataTypeDefinition dataType = propertyDef.getDataType();
+                    analyser = loadAnalyzer(dataType);
+                }
+                else
+                {
+                    analyser = new VerbatimAnalyser();
+                }
+            }
+            else
+            {
+                DataTypeDefinition dataType = dictionaryService.getDataType(DataTypeDefinition.TEXT);
+                analyser = loadAnalyzer(dataType);
+            }
         }
         else
         {
@@ -127,28 +143,31 @@ public class LuceneAnalyser extends Analyzer
         analysers.put(fieldName, analyser);
         return analyser;
     }
 
     private Analyzer loadAnalyzer(DataTypeDefinition dataType)
     {
         String analyserClassName = dataType.getAnalyserClassName();
         try
         {
             Class<?> clazz = Class.forName(analyserClassName);
-            Analyzer analyser = (Analyzer)clazz.newInstance();
+            Analyzer analyser = (Analyzer) clazz.newInstance();
             return analyser;
         }
         catch (ClassNotFoundException e)
         {
-            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using " + analyserClassName);
+            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using "
+                    + analyserClassName);
         }
         catch (InstantiationException e)
         {
-            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using " + analyserClassName);
+            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using "
+                    + analyserClassName);
         }
         catch (IllegalAccessException e)
        {
-            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using " + analyserClassName);
+            throw new RuntimeException("Unable to load analyser for property of type " + dataType.getName() + " using "
+                    + analyserClassName);
         }
     }
 
 }
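The behavioural change in the two hunks above: a property that is indexed but not tokenised now gets a VerbatimAnalyser, so its whole stored value becomes a single token, while tokenised properties keep the analyser registered for their data type. A condensed sketch of the selection rule (the method name is hypothetical; the logic is as in the diff):

    // Hypothetical condensation of the selection logic above.
    private Analyzer analyserFor(PropertyDefinition propertyDef)
    {
        if (propertyDef == null)
        {
            // unknown property: fall back to the default d:text analyser
            return loadAnalyzer(dictionaryService.getDataType(DataTypeDefinition.TEXT));
        }
        return propertyDef.isTokenisedInIndex()
                ? loadAnalyzer(propertyDef.getDataType())   // analyser class named by the data type
                : new VerbatimAnalyser();                   // exact-value matching only
    }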
@@ -264,6 +264,7 @@ public class LuceneTest extends TestCase
         testProperties.put(QName.createQName(TEST_NAMESPACE, "category-ista"), new NodeRef(storeRef, "CategoryId"));
         testProperties.put(QName.createQName(TEST_NAMESPACE, "noderef-ista"), n1);
         testProperties.put(QName.createQName(TEST_NAMESPACE, "path-ista"), nodeService.getPath(n3));
+        testProperties.put(QName.createQName(TEST_NAMESPACE, "verbatim"), " ");
         testProperties.put(QName.createQName(TEST_NAMESPACE, "null"), null);
         testProperties.put(QName.createQName(TEST_NAMESPACE, "list"), new ArrayList());
         ArrayList<Object> testList = new ArrayList<Object>();
@@ -715,7 +716,7 @@ public class LuceneTest extends TestCase
         }
     }
 
-    public void testSort() throws Exception
+    public void xtestSort() throws Exception
     {
         luceneFTS.pause();
         buildBaseIndex();
@@ -1855,6 +1856,19 @@ public class LuceneTest extends TestCase
         assertNotNull(results.getRow(0).getValue(QName.createQName(TEST_NAMESPACE, "path-ista")));
         results.close();
 
+        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@"
+                + escapeQName(QName.createQName(TEST_NAMESPACE, "verbatim")) + ":\" \"",
+                null, null);
+        assertEquals(1, results.length());
+        assertNotNull(results.getRow(0).getValue(QName.createQName(TEST_NAMESPACE, "verbatim")));
+        results.close();
+
+        results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "\\@"
+                + escapeQName(QName.createQName(TEST_NAMESPACE, "verbatim")) + ":\" \"",
+                null, null);
+        assertEquals(0, results.length());
+        results.close();
+
         results = searcher.query(rootNodeRef.getStoreRef(), "lucene", "TYPE:\"" + testType.toString() + "\"", null,
                 null);
         assertEquals(1, results.length());
@@ -41,6 +41,7 @@ import org.alfresco.repo.dictionary.M2Model;
 import org.alfresco.repo.node.BaseNodeServiceTest;
 import org.alfresco.repo.search.QueryParameterDefImpl;
 import org.alfresco.repo.search.QueryRegisterComponent;
 import org.alfresco.repo.search.impl.lucene.analysis.NumericEncoder;
 import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
 import org.alfresco.repo.search.results.ChildAssocRefResultSet;
 import org.alfresco.repo.search.results.DetachedResultSet;
@@ -371,6 +372,7 @@ public class LuceneTest2 extends TestCase
         super(arg0);
     }
 
 
     public void test0() throws Exception
     {
         luceneFTS.pause();
@@ -219,6 +219,16 @@
             <tokenised>true</tokenised>
          </index>
       </property>
+      <property name="test:verbatim">
+         <type>d:text</type>
+         <mandatory>true</mandatory>
+         <multiple>false</multiple>
+         <index enabled="true">
+            <atomic>true</atomic>
+            <stored>true</stored>
+            <tokenised>false</tokenised>
+         </index>
+      </property>
    </properties>
    <mandatory-aspects>
       <aspect>test:testAspect</aspect>
@@ -0,0 +1,22 @@
+package org.alfresco.repo.search.impl.lucene.analysis;
+
+import java.io.Reader;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+
+public class VerbatimAnalyser
+
+        extends Analyzer
+{
+
+    public VerbatimAnalyser()
+    {
+        super();
+    }
+
+    public TokenStream tokenStream(String fieldName, Reader reader)
+    {
+        return new VerbatimTokenFilter(reader);
+    }
+}
@@ -0,0 +1,42 @@
+package org.alfresco.repo.search.impl.lucene.analysis;
+
+import java.io.IOException;
+import java.io.Reader;
+
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.Tokenizer;
+
+public class VerbatimTokenFilter extends Tokenizer
+{
+    boolean readInput = true;
+
+    VerbatimTokenFilter(Reader in)
+    {
+        super(in);
+    }
+
+    @Override
+    public Token next() throws IOException
+    {
+        if (readInput)
+        {
+            readInput = false;
+            StringBuilder buffer = new StringBuilder();
+            int current;
+            char c;
+            while ((current = input.read()) != -1)
+            {
+                c = (char) current;
+                buffer.append(c);
+            }
+
+            String token = buffer.toString();
+            return new Token(token, 0, token.length() - 1, "VERBATIM");
+        }
+        else
+        {
+            return null;
+        }
+    }
+
+}
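A quick sanity check of the new tokenizer, as a minimal sketch assuming the Lucene 1.4-era API used above (imports as in the new file, plus java.io.StringReader): the entire field value comes back as a single token, which is what makes the untokenised test:verbatim property match only exact values.

    Analyzer analyser = new VerbatimAnalyser();
    TokenStream stream = analyser.tokenStream("verbatim", new StringReader("Hello World"));
    Token only = stream.next();
    System.out.println(only.termText());   // prints the whole input: "Hello World"
    System.out.println(stream.next());     // null - there is no second token

One detail worth noting: the end offset passed to the Token constructor above is token.length() - 1, while Lucene's usual convention is an exclusive end offset, i.e. token.length().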
@@ -154,6 +154,7 @@ class IndexEntry
         builder.append("Type=").append(getType()).append(" ");
         builder.append("Status=").append(getStatus()).append(" ");
         builder.append("Docs=").append(getDocumentCount()).append(" ");
+        builder.append("Deletions=").append(getDeletions()).append(" ");
         return builder.toString();
     }
 
@@ -49,13 +49,11 @@ import org.alfresco.error.AlfrescoRuntimeException;
 import org.alfresco.repo.search.IndexerException;
 import org.alfresco.repo.search.impl.lucene.FilterIndexReaderByNodeRefs2;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.repository.StoreRef;
 import org.alfresco.util.GUID;
 import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.MultiReader;
@@ -206,6 +204,32 @@ public class IndexInfo
 
     private static HashMap<File, IndexInfo> indexInfos = new HashMap<File, IndexInfo>();
 
+    private int maxDocsForInMemoryMerge = 10000;
+
+    private int writerMinMergeDocs = 1000;
+
+    private int writerMergeFactor = 5;
+
+    private int writerMaxMergeDocs = 1000000;
+
+    private boolean writerUseCompoundFile = true;
+
+    private int mergerMinMergeDocs = 1000;
+
+    private int mergerMergeFactor = 5;
+
+    private int mergerMaxMergeDocs = 1000000;
+
+    private boolean mergerUseCompoundFile = true;
+
+    private int mergerTargetOverlays = 5;
+
+    // TODO: Something to control the maximum number of overlays
+
+    private boolean enableCleanerThread = true;
+
+    private boolean enableMergerThread = true;
+
     static
     {
         System.setProperty("disableLuceneLocks", "true");
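These fields lift the previously hard-coded Lucene writer and merger settings into configurable state; the matching getters and setters appear at the end of this diff. A sketch of programmatic tuning (the values and the path are illustrative; the constructor usage follows the commented-out benchmark removed later in this diff):

    IndexInfo ii = new IndexInfo(new File("/tmp/indexTest"));  // illustrative location
    ii.setWriterMergeFactor(5);               // segments merged per level while writing
    ii.setWriterMaxMergeDocs(1000000);        // cap on merged segment size
    ii.setMaxDocsForInMemoryMerge(10000);     // smaller merges run in a RAMDirectory
    ii.setMergerTargetOverlays(5);            // deltas tolerated before a merge is triggered
    ii.setEnableCleanerThread(true);          // background deletion of retired index folders
    ii.setEnableMergerThread(true);           // background merging of committed deltas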
@@ -283,6 +307,10 @@ public class IndexInfo
         try
         {
             writer = new IndexWriter(oldIndex, new StandardAnalyzer(), false);
+            writer.setUseCompoundFile(writerUseCompoundFile);
+            writer.minMergeDocs = writerMinMergeDocs;
+            writer.mergeFactor = writerMergeFactor;
+            writer.maxMergeDocs = writerMaxMergeDocs;
             writer.optimize();
             long docs = writer.docCount();
             writer.close();
@@ -393,24 +421,32 @@ public class IndexInfo
             }
         }
         // TODO: Add unrecognised folders for deletion.
-        cleanerThread = new Thread(cleaner);
-        cleanerThread.setDaemon(true);
-        cleanerThread.setName("Index cleaner thread");
-        //cleanerThread.start();
-
-        mergerThread = new Thread(merger);
-        mergerThread.setDaemon(true);
-        mergerThread.setName("Index merger thread");
-        //mergerThread.start();
+        if (enableCleanerThread)
+        {
+            cleanerThread = new Thread(cleaner);
+            cleanerThread.setDaemon(true);
+            cleanerThread.setName("Index cleaner thread "+indexDirectory);
+            cleanerThread.start();
+        }
+
+        if (enableMergerThread)
+        {
+            mergerThread = new Thread(merger);
+            mergerThread.setDaemon(true);
+            mergerThread.setName("Index merger thread "+indexDirectory);
+            mergerThread.start();
+        }
 
         IndexWriter writer;
         try
         {
             writer = new IndexWriter(emptyIndex, new StandardAnalyzer(), true);
-            writer.setUseCompoundFile(true);
-            writer.minMergeDocs = 1000;
-            writer.mergeFactor = 5;
-            writer.maxMergeDocs = 1000000;
+            writer.setUseCompoundFile(writerUseCompoundFile);
+            writer.minMergeDocs = writerMinMergeDocs;
+            writer.mergeFactor = writerMergeFactor;
+            writer.maxMergeDocs = writerMaxMergeDocs;
         }
         catch (IOException e)
         {
@@ -441,21 +477,28 @@ public class IndexInfo
             // close index writer if required
             closeDeltaIndexWriter(id);
             // Check the index knows about the transaction
-            File location = ensureDeltaIsRegistered(id);
-            // Create a dummy index reader to deal with empty indexes and not persist these.
-            if (IndexReader.indexExists(location))
-            {
-                reader = IndexReader.open(location);
-            }
-            else
-            {
-                reader = IndexReader.open(emptyIndex);
-            }
+            reader = buildAndRegisterDeltaReader(id);
             indexReaders.put(id, reader);
         }
         return reader;
     }
 
+    private IndexReader buildAndRegisterDeltaReader(String id) throws IOException
+    {
+        IndexReader reader;
+        File location = ensureDeltaIsRegistered(id);
+        // Create a dummy index reader to deal with empty indexes and not persist these.
+        if (IndexReader.indexExists(location))
+        {
+            reader = IndexReader.open(location);
+        }
+        else
+        {
+            reader = IndexReader.open(emptyIndex);
+        }
+        return reader;
+    }
+
     /**
      * The delta information does not need to be saved to disk.
      *
@@ -511,10 +554,10 @@ public class IndexInfo
         if (!IndexReader.indexExists(location))
         {
             IndexWriter creator = new IndexWriter(location, analyzer, true);
-            creator.setUseCompoundFile(true);
-            creator.minMergeDocs = 1000;
-            creator.mergeFactor = 5;
-            creator.maxMergeDocs = 1000000;
+            creator.setUseCompoundFile(writerUseCompoundFile);
+            creator.minMergeDocs = writerMinMergeDocs;
+            creator.mergeFactor = writerMergeFactor;
+            creator.maxMergeDocs = writerMaxMergeDocs;
             return creator;
         }
         return null;
@@ -538,10 +581,10 @@ public class IndexInfo
         if (writer == null)
         {
             writer = new IndexWriter(location, analyzer, false);
-            writer.setUseCompoundFile(true);
-            writer.minMergeDocs = 1000;
-            writer.mergeFactor = 5;
-            writer.maxMergeDocs = 1000000;
+            writer.setUseCompoundFile(writerUseCompoundFile);
+            writer.minMergeDocs = writerMinMergeDocs;
+            writer.mergeFactor = writerMergeFactor;
+            writer.maxMergeDocs = writerMaxMergeDocs;
         }
         indexWriters.put(id, writer);
     }
@@ -789,12 +832,14 @@ public class IndexInfo
             // TODO: Should use the in memory index but we often end up forcing to disk anyway.
             // Is it worth it?
             // luceneIndexer.flushPending();
-            IndexReader deltaReader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(id,
-                    getDeltaIndexReader(id));
-            ReferenceCounting deltaRefCount = (ReferenceCounting) deltaReader;
-            deltaRefCount.incrementReferenceCount();
-
+            IndexReader deltaReader = buildAndRegisterDeltaReader(id);
             IndexReader reader = new MultiReader(new IndexReader[] {
                     new FilterIndexReaderByNodeRefs2(mainIndexReader, deletions, deleteOnlyNodes), deltaReader });
+            reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader("MainReader"+id, reader);
+            ReferenceCounting refCounting = (ReferenceCounting)reader;
+            refCounting.incrementReferenceCount();
+            refCounting.setInvalidForReuse();
             return reader;
         }
         finally
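The composite reader built above, restated as a sketch: the searchable view is the main index with deleted nodes filtered out, plus the transaction's own delta, wrapped in a reference-counting reader so the underlying files close only when the last consumer releases them.

    IndexReader filteredMain = new FilterIndexReaderByNodeRefs2(mainIndexReader, deletions, deleteOnlyNodes);
    IndexReader combined = new MultiReader(new IndexReader[] { filteredMain, deltaReader });
    IndexReader counted = ReferenceCountingReadOnlyIndexReaderFactory.createReader("MainReader" + id, combined);
    ((ReferenceCounting) counted).incrementReferenceCount();  // hand to the caller with one reference held
    ((ReferenceCounting) counted).setInvalidForReuse();       // close fully once that reference is released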
@@ -1659,7 +1704,18 @@ public class IndexInfo
         {
             if (indexIsShared)
             {
+                long start = 0l;
+                if (s_logger.isDebugEnabled())
+                {
+                    s_logger.debug(" ... waiting for file lock");
+                    start = System.nanoTime();
+                }
                 fileLock = indexInfoChannel.lock();
+                if (s_logger.isDebugEnabled())
+                {
+                    long end = System.nanoTime();
+                    s_logger.debug(" ... got file lock in " + ((end - start)/10e6f) + " ms");
+                }
                 if (!checkVersion())
                 {
                     setStatusFromFile();
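The instrumentation above only touches System.nanoTime() when debug logging is enabled, so the normal path pays nothing. One caveat: converting nanoseconds to milliseconds is a division by 1e6, while the diff divides by 10e6f (i.e. 1e7), under-reporting waits by a factor of ten. A corrected sketch of the pattern:

    long start = 0L;
    if (s_logger.isDebugEnabled())
    {
        s_logger.debug(" ... waiting for file lock");
        start = System.nanoTime();
    }
    fileLock = indexInfoChannel.lock();   // the blocking call being timed
    if (s_logger.isDebugEnabled())
    {
        long elapsedMs = (System.nanoTime() - start) / 1000000L;   // 1e6 ns per ms
        s_logger.debug(" ... got file lock in " + elapsedMs + " ms");
    }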
@@ -1688,6 +1744,10 @@ public class IndexInfo
             try
             {
                 fileLock.release();
+                if (s_logger.isDebugEnabled())
+                {
+                    s_logger.debug(" ... released file lock");
+                }
             }
             catch (IOException e)
             {
@@ -1696,6 +1756,11 @@ public class IndexInfo
         }
     }
 
+    /**
+     * Helper to print out index information
+     * 
+     * @param args
+     */
     public static void main(String[] args)
     {
@@ -1721,67 +1786,6 @@ public class IndexInfo
         }
     }
 
-    // public static void main(String[] args) throws IOException
-    // {
-    // System.setProperty("disableLuceneLocks", "true");
-    //
-    // HashSet<NodeRef> deletions = new HashSet<NodeRef>();
-    // for (int i = 0; i < 0; i++)
-    // {
-    // deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()));
-    // }
-    //
-    // int repeat = 100;
-    // int docs = 1;
-    // final IndexInfo ii = new IndexInfo(new File("c:\\indexTest"));
-    //
-    // long totalTimeA = 0;
-    // long countA = 0;
-    //
-    // while (true)
-    // {
-    // long start = System.nanoTime();
-    // for (int i = 0; i < repeat; i++)
-    // {
-    // String guid = GUID.generate();
-    // ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
-    // IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());
-    //
-    // for (int j = 0; j < docs; j++)
-    // {
-    // Document doc = new Document();
-    // for (int k = 0; k < 15; k++)
-    // {
-    // doc.add(new Field("ID" + k, guid + " " + j + " " + k, false, true, false));
-    // }
-    // writer.addDocument(doc);
-    // }
-    //
-    // ii.closeDeltaIndexWriter(guid);
-    // ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
-    // ii.setPreparedState(guid, deletions, docs, false);
-    // ii.getDeletions(guid);
-    // ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
-    // ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
-    // ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
-    // for (int j = 0; j < 0; j++)
-    // {
-    // ii.getMainIndexReferenceCountingReadOnlyIndexReader();
-    // }
-    // }
-    //
-    // long end = System.nanoTime();
-    //
-    // totalTimeA += (end - start);
-    // countA += repeat;
-    // float average = countA * 1000000000f / totalTimeA;
-    //
-    // System.out.println("Repeated "
-    // + repeat + " in " + ((end - start) / 1000000000.0) + " average = " + average);
-    // }
-    // }
-
     /**
      * Clean up support.
      *
@@ -1796,6 +1800,7 @@ public class IndexInfo
             while (runnable)
             {
                 String id = null;
+                HashSet<String> fails = new HashSet<String>();
                 while ((id = deleteQueue.poll()) != null)
                 {
                     if (s_logger.isDebugEnabled())
@@ -1811,9 +1816,10 @@ public class IndexInfo
                             s_logger.debug("DELETE FAILED");
                         }
                         // try again later
-                        deleteQueue.add(id);
+                        fails.add(id);
                     }
                 }
+                deleteQueue.addAll(fails);
                 synchronized (this)
                 {
                     try
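The switch from deleteQueue.add(id) to a local fails set fixes a potential busy loop: an id re-queued inside the poll() loop is immediately polled again, so a persistently undeletable index would keep the cleaner spinning. Collecting failures and re-queuing them after the drain lets the thread reach its wait and retry on the next wake-up. The shape of the fix (tryDelete is a hypothetical stand-in for the deletion attempt):

    HashSet<String> fails = new HashSet<String>();
    String id;
    while ((id = deleteQueue.poll()) != null)
    {
        if (!tryDelete(id))   // hypothetical stand-in for deleting the index folder
        {
            fails.add(id);    // retried on the next pass, not this one
        }
    }
    deleteQueue.addAll(fails);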
@@ -1977,7 +1983,7 @@ public class IndexInfo
                 if (!mergingIndexes && !applyingDeletions)
                 {
 
-                    if ((indexes > 5) || (deltas > 5))
+                    if ((indexes > mergerMergeFactor) || (deltas > mergerTargetOverlays))
                     {
                         if (indexes > deltas)
                         {
@@ -2331,7 +2337,7 @@ public class IndexInfo
             }
         }
 
-        int position = findMergeIndex(1, 1000000, 5, mergeList);
+        int position = findMergeIndex(1, mergerMaxMergeDocs, mergerMergeFactor, mergeList);
         String firstMergeId = mergeList.get(position).getName();
 
         long count = 0;
@@ -2415,7 +2421,7 @@ public class IndexInfo
             else if (entry.getStatus() == TransactionStatus.MERGE_TARGET)
             {
                 outputLocation = location;
-                if (docCount < 10000)
+                if (docCount < maxDocsForInMemoryMerge)
                 {
                     ramDirectory = new RAMDirectory();
                     writer = new IndexWriter(ramDirectory, new StandardAnalyzer(), true);
@@ -2423,11 +2429,12 @@ public class IndexInfo
                 else
                 {
                     writer = new IndexWriter(location, new StandardAnalyzer(), true);
 
                 }
-                writer.setUseCompoundFile(true);
-                writer.minMergeDocs = 1000;
-                writer.mergeFactor = 5;
-                writer.maxMergeDocs = 1000000;
+                writer.setUseCompoundFile(mergerUseCompoundFile);
+                writer.minMergeDocs = mergerMinMergeDocs;
+                writer.mergeFactor = mergerMergeFactor;
+                writer.maxMergeDocs = mergerMaxMergeDocs;
             }
         }
         writer.addIndexes(readers);
@@ -2523,17 +2530,18 @@ public class IndexInfo
                     indexEntries.remove(id);
                     deleteQueue.add(id);
                 }
-                synchronized (cleaner)
-                {
-                    cleaner.notify();
-                }
 
                 dumpInfo();
 
                 writeStatus();
 
                 clearOldReaders();
 
+                synchronized (cleaner)
+                {
+                    cleaner.notify();
+                }
+
                 return null;
             }
@@ -2601,10 +2609,19 @@ public class IndexInfo
 
     private void getWriteLock()
     {
+        String threadName = null;
+        long start = 0l;
+        if (s_logger.isDebugEnabled())
+        {
+            threadName = Thread.currentThread().getName();
+            s_logger.debug("Waiting for WRITE lock - " + threadName);
+            start = System.nanoTime();
+        }
         readWriteLock.writeLock().lock();
         if (s_logger.isDebugEnabled())
         {
-            s_logger.debug("GOT WRITE LOCK - " + Thread.currentThread().getName());
+            long end = System.nanoTime();
+            s_logger.debug("...GOT WRITE LOCK - " + threadName + " - in " + ((end - start)/10e6f) + " ms");
         }
     }
 
@@ -2612,17 +2629,26 @@ public class IndexInfo
     {
         if (s_logger.isDebugEnabled())
         {
-            s_logger.debug("RELEASES WRITE LOCK - " + Thread.currentThread().getName());
+            s_logger.debug("RELEASED WRITE LOCK - " + Thread.currentThread().getName());
         }
         readWriteLock.writeLock().unlock();
     }
 
     private void getReadLock()
     {
+        String threadName = null;
+        long start = 0l;
+        if (s_logger.isDebugEnabled())
+        {
+            threadName = Thread.currentThread().getName();
+            s_logger.debug("Waiting for READ lock - " + threadName);
+            start = System.nanoTime();
+        }
         readWriteLock.readLock().lock();
         if (s_logger.isDebugEnabled())
         {
-            s_logger.debug("GOT READ LOCK - " + Thread.currentThread().getName());
+            long end = System.nanoTime();
+            s_logger.debug("...GOT READ LOCK - " + threadName + " - in " + ((end - start)/10e6f) + " ms");
         }
     }
 
@@ -2630,7 +2656,7 @@ public class IndexInfo
     {
         if (s_logger.isDebugEnabled())
         {
-            s_logger.debug("RELEASES READ LOCK - " + Thread.currentThread().getName());
+            s_logger.debug("RELEASED READ LOCK - " + Thread.currentThread().getName());
         }
         readWriteLock.readLock().unlock();
     }
@@ -2639,4 +2665,136 @@ public class IndexInfo
     {
         return indexDirectory.toString();
     }
 
+    public boolean isEnableCleanerThread()
+    {
+        return enableCleanerThread;
+    }
+
+    public void setEnableCleanerThread(boolean enableCleanerThread)
+    {
+        this.enableCleanerThread = enableCleanerThread;
+    }
+
+    public boolean isEnableMergerThread()
+    {
+        return enableMergerThread;
+    }
+
+    public void setEnableMergerThread(boolean enableMergerThread)
+    {
+        this.enableMergerThread = enableMergerThread;
+    }
+
+    public boolean isIndexIsShared()
+    {
+        return indexIsShared;
+    }
+
+    public void setIndexIsShared(boolean indexIsShared)
+    {
+        this.indexIsShared = indexIsShared;
+    }
+
+    public int getMaxDocsForInMemoryMerge()
+    {
+        return maxDocsForInMemoryMerge;
+    }
+
+    public void setMaxDocsForInMemoryMerge(int maxDocsForInMemoryMerge)
+    {
+        this.maxDocsForInMemoryMerge = maxDocsForInMemoryMerge;
+    }
+
+    public int getMergerMaxMergeDocs()
+    {
+        return mergerMaxMergeDocs;
+    }
+
+    public void setMergerMaxMergeDocs(int mergerMaxMergeDocs)
+    {
+        this.mergerMaxMergeDocs = mergerMaxMergeDocs;
+    }
+
+    public int getMergerMergeFactor()
+    {
+        return mergerMergeFactor;
+    }
+
+    public void setMergerMergeFactor(int mergerMergeFactor)
+    {
+        this.mergerMergeFactor = mergerMergeFactor;
+    }
+
+    public int getMergerMinMergeDocs()
+    {
+        return mergerMinMergeDocs;
+    }
+
+    public void setMergerMinMergeDocs(int mergerMinMergeDocs)
+    {
+        this.mergerMinMergeDocs = mergerMinMergeDocs;
+    }
+
+    public int getMergerTargetOverlays()
+    {
+        return mergerTargetOverlays;
+    }
+
+    public void setMergerTargetOverlays(int mergerTargetOverlays)
+    {
+        this.mergerTargetOverlays = mergerTargetOverlays;
+    }
+
+    public boolean isMergerUseCompoundFile()
+    {
+        return mergerUseCompoundFile;
+    }
+
+    public void setMergerUseCompoundFile(boolean mergerUseCompoundFile)
+    {
+        this.mergerUseCompoundFile = mergerUseCompoundFile;
+    }
+
+    public int getWriterMaxMergeDocs()
+    {
+        return writerMaxMergeDocs;
+    }
+
+    public void setWriterMaxMergeDocs(int writerMaxMergeDocs)
+    {
+        this.writerMaxMergeDocs = writerMaxMergeDocs;
+    }
+
+    public int getWriterMergeFactor()
+    {
+        return writerMergeFactor;
+    }
+
+    public void setWriterMergeFactor(int writerMergeFactor)
+    {
+        this.writerMergeFactor = writerMergeFactor;
+    }
+
+    public int getWriterMinMergeDocs()
+    {
+        return writerMinMergeDocs;
+    }
+
+    public void setWriterMinMergeDocs(int writerMinMergeDocs)
+    {
+        this.writerMinMergeDocs = writerMinMergeDocs;
+    }
+
+    public boolean isWriterUseCompoundFile()
+    {
+        return writerUseCompoundFile;
+    }
+
+    public void setWriterUseCompoundFile(boolean writerUseCompoundFile)
+    {
+        this.writerUseCompoundFile = writerUseCompoundFile;
+    }
+
 }
@@ -207,6 +207,7 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
         writer.addDocument(doc);
 
         ii.closeDeltaIndexWriter(guid);
 
         ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
+        ii.setPreparedState(guid, new HashSet<NodeRef>(), 1, false);
         ii.getDeletions(guid);
@@ -32,16 +32,17 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
     public static class ReferenceCountingReadOnlyIndexReader extends FilterIndexReader implements ReferenceCounting
     {
+        private static Logger s_logger = Logger.getLogger(ReferenceCountingReadOnlyIndexReader.class);
 
         private static final long serialVersionUID = 7693185658022810428L;
 
         String id;
 
         int refCount = 0;
 
         boolean invalidForReuse = false;
 
         boolean allowsDeletions;
 
         ReferenceCountingReadOnlyIndexReader(String id, IndexReader indexReader)
         {
             super(indexReader);
@@ -51,18 +52,20 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
         public synchronized void incrementReferenceCount()
         {
             refCount++;
-            if(s_logger.isDebugEnabled())
+            if (s_logger.isDebugEnabled())
             {
-                s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " - increment - ref count is "+refCount);
+                s_logger.debug(Thread.currentThread().getName()
+                        + ": Reader " + id + " - increment - ref count is " + refCount);
             }
         }
 
         public synchronized void decrementReferenceCount() throws IOException
         {
             refCount--;
-            if(s_logger.isDebugEnabled())
+            if (s_logger.isDebugEnabled())
             {
-                s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " - decrement - ref count is "+refCount);
+                s_logger.debug(Thread.currentThread().getName()
+                        + ": Reader " + id + " - decrement - ref count is " + refCount);
             }
             closeIfRequired();
         }
@@ -71,17 +74,19 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
         {
             if ((refCount == 0) && invalidForReuse)
             {
-                if(s_logger.isDebugEnabled())
+                if (s_logger.isDebugEnabled())
                 {
-                    s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " closed.");
+                    s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " closed.");
                 }
                 in.close();
             }
             else
             {
-                if(s_logger.isDebugEnabled())
+                if (s_logger.isDebugEnabled())
                 {
-                    s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " still open .... ref = "+refCount+" invalidForReuse = "+invalidForReuse);
+                    s_logger.debug(Thread.currentThread().getName()
+                            + ": Reader " + id + " still open .... ref = " + refCount + " invalidForReuse = "
+                            + invalidForReuse);
                 }
             }
         }
@@ -94,9 +99,9 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
         public synchronized void setInvalidForReuse() throws IOException
         {
             invalidForReuse = true;
-            if(s_logger.isDebugEnabled())
+            if (s_logger.isDebugEnabled())
             {
-                s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " set invalid for reuse");
+                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " set invalid for reuse");
             }
             closeIfRequired();
         }
@@ -104,9 +109,9 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
         @Override
         protected void doClose() throws IOException
         {
-            if(s_logger.isDebugEnabled())
+            if (s_logger.isDebugEnabled())
             {
-                s_logger.debug(Thread.currentThread().getName()+ ": Reader "+id+ " closing");
+                s_logger.debug(Thread.currentThread().getName() + ": Reader " + id + " closing");
             }
             decrementReferenceCount();
         }
@@ -116,7 +121,6 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
         {
             throw new UnsupportedOperationException("Delete is not supported by read only index readers");
         }
 
-
     }
 }
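Taken together, the reader methods above implement a simple ownership protocol: consumers bracket use with increment/decrement, the producer marks superseded readers invalid for reuse, and the wrapped reader closes only once refCount == 0 and invalidForReuse both hold. A usage sketch (underlyingReader is a placeholder for whatever IndexReader is being wrapped):

    IndexReader reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader("main", underlyingReader);
    ReferenceCounting counting = (ReferenceCounting) reader;
    counting.incrementReferenceCount();        // consumer takes a reference
    try
    {
        // ... run searches against reader ...
    }
    finally
    {
        counting.decrementReferenceCount();    // closes here if already marked invalid for reuse
    }
    counting.setInvalidForReuse();             // producer: no new consumers; close when references drain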