Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-24 17:32:48 +00:00)

Merged V2.1 to HEAD
6274: AR-1609 lucene.indexer.maxFieldLength property
6282: AWC-1347 Orphaned categories

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@6283 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -59,6 +59,7 @@ import org.alfresco.service.cmr.repository.ContentIOException;
 import org.alfresco.service.cmr.repository.ContentReader;
 import org.alfresco.service.cmr.repository.ContentService;
 import org.alfresco.service.cmr.repository.ContentWriter;
+import org.alfresco.service.cmr.repository.InvalidNodeRefException;
 import org.alfresco.service.cmr.repository.MLText;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.repository.NodeService;
@@ -170,8 +171,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 NodeRef childRef = relationshipRef.getChildRef();
 // If we have the root node we delete all other root nodes first
-if ((relationshipRef.getParentRef() == null)
-&& childRef.equals(nodeService.getRootNode(childRef.getStoreRef())))
+if ((relationshipRef.getParentRef() == null) && childRef.equals(nodeService.getRootNode(childRef.getStoreRef())))
 {
 addRootNodesToDeletionList();
 s_logger.warn("Detected root node addition: deleting all nodes from the index");
@@ -283,8 +283,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 }
 
-public void updateChildRelationship(ChildAssociationRef relationshipBeforeRef,
-ChildAssociationRef relationshipAfterRef) throws LuceneIndexException
+public void updateChildRelationship(ChildAssociationRef relationshipBeforeRef, ChildAssociationRef relationshipAfterRef) throws LuceneIndexException
 {
 if (s_logger.isDebugEnabled())
 {
@@ -339,8 +338,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 * @return - the indexer instance
 * @throws LuceneIndexException
 */
-public static ADMLuceneIndexerImpl getUpdateIndexer(StoreRef storeRef, String deltaId, LuceneConfig config)
-throws LuceneIndexException
+public static ADMLuceneIndexerImpl getUpdateIndexer(StoreRef storeRef, String deltaId, LuceneConfig config) throws LuceneIndexException
 {
 if (s_logger.isDebugEnabled())
 {
@@ -513,8 +511,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 }
 
-public List<Document> createDocuments(String stringNodeRef, boolean isNew, boolean indexAllProperties,
-boolean includeDirectoryDocuments)
+public List<Document> createDocuments(String stringNodeRef, boolean isNew, boolean indexAllProperties, boolean includeDirectoryDocuments)
 {
 NodeRef nodeRef = new NodeRef(stringNodeRef);
 
@@ -526,8 +523,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 
 Collection<Path> directPaths = nodeService.getPaths(nodeRef, false);
 Collection<Pair<Path, QName>> categoryPaths = getCategoryPaths(nodeRef, properties);
-Collection<Pair<Path, QName>> paths = new ArrayList<Pair<Path, QName>>(directPaths.size()
-+ categoryPaths.size());
+Collection<Pair<Path, QName>> paths = new ArrayList<Pair<Path, QName>>(directPaths.size() + categoryPaths.size());
 for (Path path : directPaths)
 {
 paths.add(new Pair<Path, QName>(path, null));
@@ -536,8 +532,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 
 Document xdoc = new Document();
 xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 boolean isAtomic = true;
 for (QName propertyName : properties.keySet())
 {
@@ -593,12 +588,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 qNameBuffer.append(";/");
 }
 qNameBuffer.append(ISO9075.getXPathName(qNameRef.getQName()));
-xdoc.add(new Field("PARENT", qNameRef.getParentRef().toString(), Field.Store.YES,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()),
-Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
-xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair
-.getSecond()), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+xdoc.add(new Field("PARENT", qNameRef.getParentRef().toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
+xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair.getSecond()), Field.Store.YES, Field.Index.UN_TOKENIZED,
+Field.TermVector.NO));
 }
 }
 
@@ -616,22 +609,17 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 if (directPaths.contains(pair.getFirst()))
 {
 Document directoryEntry = new Document();
-directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED,
-Field.TermVector.NO));
+directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
 for (NodeRef parent : getParents(pair.getFirst()))
 {
-directoryEntry.add(new Field("ANCESTOR", parent.toString(), Field.Store.NO,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+directoryEntry.add(new Field("ANCESTOR", parent.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
-directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 
 if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef))))
 {
-directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
 
 docs.add(directoryEntry);
@@ -649,8 +637,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 xdoc.add(new Field("PATH", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
 xdoc.add(new Field("QNAME", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
 xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN),
-Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
+xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
 xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 docs.add(xdoc);
 
@@ -658,45 +645,36 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 else
 // not a root node
 {
-xdoc.add(new Field("QNAME", qNameBuffer.toString(), Field.Store.YES, Field.Index.TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("QNAME", qNameBuffer.toString(), Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
 // xdoc.add(new Field("PARENT", parentBuffer.toString(), true, true,
 // true));
 
 ChildAssociationRef primary = nodeService.getPrimaryParent(nodeRef);
-xdoc.add(new Field("PRIMARYPARENT", primary.getParentRef().toString(), Field.Store.YES,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), Field.Store.YES,
-Field.Index.NO, Field.TermVector.NO));
+xdoc.add(new Field("PRIMARYPARENT", primary.getParentRef().toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
 QName typeQName = nodeService.getType(nodeRef);
 
-xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 for (QName classRef : nodeService.getAspects(nodeRef))
 {
-xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
 
 xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 if (isAtomic || indexAllProperties)
 {
-xdoc
-.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
 else
 {
 if (isNew)
 {
-xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
 else
 {
-xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.UN_TOKENIZED,
-Field.TermVector.NO));
+xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 }
 }
 
@@ -713,11 +691,9 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 * true to ignore all properties that must be indexed non-atomically
 * @return Returns true if the property was indexed atomically, or false if it should be done asynchronously
 */
-protected boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc,
-boolean indexAtomicPropertiesOnly)
+protected boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc, boolean indexAtomicPropertiesOnly)
 {
-String attributeName = "@"
-+ QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName()));
+String attributeName = "@" + QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName()));
 
 boolean store = true;
 boolean index = true;
@@ -764,8 +740,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 catch (TypeConversionException e)
 {
-doc.add(new Field(attributeName, NOT_INDEXED_NO_TYPE_CONVERSION, Field.Store.NO,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName, NOT_INDEXED_NO_TYPE_CONVERSION, Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 continue;
 }
 if (strValue == null)
@@ -784,10 +759,8 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 // store mimetype in index - even if content does not index it is useful
 // Added szie and locale - size needs to be tokenised correctly
-doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), Field.Store.NO,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
-doc.add(new Field(attributeName + ".size", Long.toString(contentData.getSize()), Field.Store.NO,
-Field.Index.TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName + ".size", Long.toString(contentData.getSize()), Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
 
 // TODO: Use the node locale in preferanced to the system locale
 Locale locale = contentData.getLocale();
@@ -803,40 +776,33 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 locale = I18NUtil.getLocale();
 }
-doc.add(new Field(attributeName + ".locale", locale.toString().toLowerCase(), Field.Store.NO,
-Field.Index.UN_TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName + ".locale", locale.toString().toLowerCase(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
 
 ContentReader reader = contentService.getReader(nodeRef, propertyName);
 if (reader != null && reader.exists())
 {
 boolean readerReady = true;
 // transform if necessary (it is not a UTF-8 text document)
-if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN)
-|| !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
+if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN) || !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8"))
 {
 // get the transformer
-ContentTransformer transformer = contentService.getTransformer(reader.getMimetype(),
-MimetypeMap.MIMETYPE_TEXT_PLAIN);
+ContentTransformer transformer = contentService.getTransformer(reader.getMimetype(), MimetypeMap.MIMETYPE_TEXT_PLAIN);
 // is this transformer good enough?
 if (transformer == null)
 {
 // log it
 if (s_logger.isDebugEnabled())
 {
-s_logger.debug("Not indexed: No transformation: \n"
-+ " source: " + reader + "\n" + " target: "
-+ MimetypeMap.MIMETYPE_TEXT_PLAIN);
+s_logger.debug("Not indexed: No transformation: \n" + " source: " + reader + "\n" + " target: " + MimetypeMap.MIMETYPE_TEXT_PLAIN);
 }
 // don't index from the reader
 readerReady = false;
 // not indexed: no transformation
 // doc.add(new Field("TEXT", NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
 // Field.Index.TOKENIZED, Field.TermVector.NO));
-doc.add(new Field(attributeName, NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO,
-Field.Index.TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName, NOT_INDEXED_NO_TRANSFORMATION, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
 }
-else if (indexAtomicPropertiesOnly
-&& transformer.getTransformationTime() > maxAtomicTransformationTime)
+else if (indexAtomicPropertiesOnly && transformer.getTransformationTime() > maxAtomicTransformationTime)
 {
 // only indexing atomic properties
 // indexing will take too long, so push it to the background
@@ -869,8 +835,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 // failed
 // doc.add(new Field("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
 // Field.Index.TOKENIZED, Field.TermVector.NO));
-doc.add(new Field(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO,
-Field.Index.TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
 }
 }
 }
@@ -917,10 +882,8 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 + (reader == null ? " --- " : Boolean.toString(reader.exists())));
 }
 // not indexed: content missing
-doc.add(new Field("TEXT", NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED,
-Field.TermVector.NO));
-doc.add(new Field(attributeName, NOT_INDEXED_CONTENT_MISSING, Field.Store.NO,
-Field.Index.TOKENIZED, Field.TermVector.NO));
+doc.add(new Field("TEXT", NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
+doc.add(new Field(attributeName, NOT_INDEXED_CONTENT_MISSING, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO));
 }
 }
 else
@@ -954,16 +917,14 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 String localeString = mlText.getValue(locale);
 StringBuilder builder = new StringBuilder();
 builder.append("\u0000").append(locale.toString()).append("\u0000").append(localeString);
-doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
-Field.TermVector.NO));
+doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex, Field.TermVector.NO));
 }
 }
 else if (isText)
 {
 // Temporary special case for uids and gids
 if (propertyName.equals(ContentModel.PROP_USER_USERNAME)
-|| propertyName.equals(ContentModel.PROP_USERNAME)
-|| propertyName.equals(ContentModel.PROP_AUTHORITY_NAME)
+|| propertyName.equals(ContentModel.PROP_USERNAME) || propertyName.equals(ContentModel.PROP_AUTHORITY_NAME)
 || propertyName.equals(ContentModel.PROP_MEMBERS))
 {
 doc.add(new Field(attributeName, strValue, fieldStore, fieldIndex, Field.TermVector.NO));
@@ -986,8 +947,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 StringBuilder builder = new StringBuilder();
 builder.append("\u0000").append(locale.toString()).append("\u0000").append(strValue);
-doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex,
-Field.TermVector.NO));
+doc.add(new Field(attributeName, builder.toString(), fieldStore, fieldIndex, Field.TermVector.NO));
 }
 else
 {
@@ -1097,10 +1057,11 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 if (propDef.getDataType().getName().equals(DataTypeDefinition.CATEGORY))
 {
-for (NodeRef catRef : DefaultTypeConverter.INSTANCE.getCollection(NodeRef.class, properties
-.get(propDef.getName())))
+for (NodeRef catRef : DefaultTypeConverter.INSTANCE.getCollection(NodeRef.class, properties.get(propDef.getName())))
 {
 if (catRef != null)
+{
+try
 {
 for (Path path : nodeService.getPaths(catRef, false))
 {
@@ -1108,8 +1069,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 Path.ChildAssocElement cae = (Path.ChildAssocElement) path.get(1);
 boolean isFakeRoot = true;
-for (ChildAssociationRef car : nodeService.getParentAssocs(cae.getRef()
-.getChildRef()))
+for (ChildAssociationRef car : nodeService.getParentAssocs(cae.getRef().getChildRef()))
 {
 if (cae.getRef().equals(car))
 {
@@ -1126,6 +1086,11 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 }
 }
+}
+catch (InvalidNodeRefException e)
+{
+// If the category does not exists we move on the next
+}
 
 }
 }
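
The two hunks above appear to be the AWC-1347 (orphaned categories) fix: resolving the paths of a category NodeRef is now wrapped in try/catch so that a dangling category reference is skipped instead of failing indexing of the whole node. Below is a minimal sketch of the same guard pattern pulled out of the indexer for illustration; the helper class and method names are hypothetical, only the Alfresco service API calls and the exception type come from the diff.

import java.util.Collection;

import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;

// Hypothetical helper illustrating the orphaned-category guard.
class CategoryPathGuard
{
    // Collects the paths of each category node, silently skipping refs that no longer resolve.
    static void collectCategoryPaths(NodeService nodeService, Collection<NodeRef> catRefs, Collection<Path> paths)
    {
        for (NodeRef catRef : catRefs)
        {
            if (catRef == null)
            {
                continue;
            }
            try
            {
                // throws InvalidNodeRefException if the category node has been deleted
                paths.addAll(nodeService.getPaths(catRef, false));
            }
            catch (InvalidNodeRefException e)
            {
                // orphaned category: ignore it and move on to the next one
            }
        }
    }
}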
@@ -1141,9 +1106,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 {
 Path.ChildAssocElement cae = (Path.ChildAssocElement) pair.getFirst().last();
 ChildAssociationRef assocRef = cae.getRef();
-pair.getFirst().append(
-new Path.ChildAssocElement(new ChildAssociationRef(assocRef.getTypeQName(), assocRef
-.getChildRef(), QName.createQName("member"), nodeRef)));
+pair.getFirst().append(new Path.ChildAssocElement(new ChildAssociationRef(assocRef.getTypeQName(), assocRef.getChildRef(), QName.createQName("member"), nodeRef)));
 }
 }
 
@@ -1233,8 +1196,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 }
 catch (IOException e)
 {
-throw new LuceneIndexException(
-"Failed to execute query to find content which needs updating in the index", e);
+throw new LuceneIndexException("Failed to execute query to find content which needs updating in the index", e);
 }
 
 for (int i = 0; i < hits.length(); i++)
@@ -102,7 +102,7 @@ public abstract class AbstractLuceneBase
 
 String basePath = getBasePath();
 File baseDir = new File(basePath);
-indexInfo = IndexInfo.getIndexInfo(baseDir);
+indexInfo = IndexInfo.getIndexInfo(baseDir, config);
 try
 {
 if (deltaId != null)
@@ -885,7 +885,6 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
 public void setIndexerMaxFieldLength(int indexerMaxFieldLength)
 {
 this.indexerMaxFieldLength = indexerMaxFieldLength;
-System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
 }
 
 /**
@@ -68,4 +68,10 @@ public interface LuceneConfig
 */
 public MLAnalysisMode getDefaultMLSearchAnalysisMode();
 
+/**
+* Get the max field length that determine how many tokens are put into the index
+* @return
+*/
+public int getIndexerMaxFieldLength();
+
 }
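
LuceneConfig now exposes the indexer max field length, and the -885,7 hunk above removes the detour through the org.apache.lucene.maxFieldLength system property: the value configured as lucene.indexer.maxFieldLength is simply held by the factory and handed out through the new getter. A minimal sketch of a config bean satisfying the new method follows; the class name and the default value are illustrative assumptions, not part of the commit.

// Illustrative config bean: stores the limit set from the lucene.indexer.maxFieldLength
// property and exposes it through the getter added to LuceneConfig in this commit.
public class IndexerMaxFieldLengthConfig
{
    private int indexerMaxFieldLength = 10000; // assumed default, matching Lucene's own 10,000-token limit

    public void setIndexerMaxFieldLength(int indexerMaxFieldLength)
    {
        this.indexerMaxFieldLength = indexerMaxFieldLength;
    }

    public int getIndexerMaxFieldLength()
    {
        return indexerMaxFieldLength;
    }
}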
@@ -56,6 +56,7 @@ import java.util.zip.CRC32;
 import org.alfresco.error.AlfrescoRuntimeException;
 import org.alfresco.repo.search.IndexerException;
 import org.alfresco.repo.search.impl.lucene.FilterIndexReaderByStringId;
+import org.alfresco.repo.search.impl.lucene.LuceneConfig;
 import org.alfresco.repo.search.impl.lucene.analysis.AlfrescoStandardAnalyser;
 import org.alfresco.util.GUID;
 import org.apache.commons.logging.Log;
@@ -318,7 +319,7 @@ public class IndexInfo
 * @return
 * @throws IndexerException
 */
-public static synchronized IndexInfo getIndexInfo(File file) throws IndexerException
+public static synchronized IndexInfo getIndexInfo(File file, LuceneConfig config) throws IndexerException
 {
 File canonicalFile;
 try
@@ -327,7 +328,7 @@ public class IndexInfo
 IndexInfo indexInfo = indexInfos.get(canonicalFile);
 if (indexInfo == null)
 {
-indexInfo = new IndexInfo(canonicalFile);
+indexInfo = new IndexInfo(canonicalFile, config);
 indexInfos.put(canonicalFile, indexInfo);
 if (s_logger.isDebugEnabled())
 {
@@ -353,11 +354,16 @@ public class IndexInfo
 *
 * @param indexDirectory
 */
-private IndexInfo(File indexDirectory)
+private IndexInfo(File indexDirectory, LuceneConfig config)
 {
 super();
 initialiseTransitions();
 
+if(config != null)
+{
+this.maxFieldLength = config.getIndexerMaxFieldLength();
+}
+
 
 // Create an empty in memory index
 IndexWriter writer;
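
IndexInfo now stores the configured limit in its maxFieldLength field, falling back to its built-in value when config is null (as in the standalone main() and the unit tests further down). The hunk does not show where that field is applied, but with the Lucene version of this era the natural consumer is IndexWriter.setMaxFieldLength, roughly as in this sketch; the directory, analyzer, and value are placeholders, and this is not the code IndexInfo itself uses to build its writers.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

// Sketch only: pushes a stored maxFieldLength onto a Lucene 2.x IndexWriter.
public class MaxFieldLengthSketch
{
    public static void main(String[] args) throws Exception
    {
        int maxFieldLength = 10000; // e.g. the value read from LuceneConfig.getIndexerMaxFieldLength()

        IndexWriter writer = new IndexWriter(new RAMDirectory(), new StandardAnalyzer(), true);
        writer.setMaxFieldLength(maxFieldLength); // caps the number of tokens indexed per field
        writer.close();
    }
}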
@@ -2113,7 +2119,7 @@ public class IndexInfo
 {
 
 String indexLocation = args[0];
-IndexInfo ii = new IndexInfo(new File(indexLocation));
+IndexInfo ii = new IndexInfo(new File(indexLocation), null);
 while (true)
 {
 ii.readWriteLock.writeLock().lock();
@@ -93,7 +93,7 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
 File tempLocation = TempFileProvider.getTempDir();
 File testArea = new File(tempLocation, "IndexInfoTest");
 File testDir = new File(testArea, "" + System.currentTimeMillis());
-final IndexInfo ii = IndexInfo.getIndexInfo(testDir);
+final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);
 
 for (int i = 0; i < WORD_LIST.length; i++)
 {
@@ -191,7 +191,7 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
 File tempLocation = TempFileProvider.getTempDir();
 File testArea = new File(tempLocation, "IndexInfoTest");
 File testDir = new File(testArea, "" + System.currentTimeMillis());
-final IndexInfo ii = IndexInfo.getIndexInfo(testDir);
+final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);
 
 for (int i = 0; i < CREATE_LIST.length; i++)
 {
@@ -381,7 +381,7 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
 File tempLocation = TempFileProvider.getTempDir();
 File testArea = new File(tempLocation, "IndexInfoTest");
 File testDir = new File(testArea, "" + System.currentTimeMillis());
-final IndexInfo ii = IndexInfo.getIndexInfo(testDir);
+final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);
 
 for (int i = 0; i < CREATE_LIST.length; i++)
 {
@@ -608,7 +608,7 @@ public static final String[] UPDATE_LIST_2 = { "alpha2", "bravo2", "charlie2", "
 File tempLocation = TempFileProvider.getTempDir();
 File testArea = new File(tempLocation, "IndexInfoTest");
 File testDir = new File(testArea, "" + System.currentTimeMillis());
-final IndexInfo ii = IndexInfo.getIndexInfo(testDir);
+final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);
 
 Thread thread1 = new Thread(new Test(ii, CREATE_LIST, UPDATE_LIST));
 Thread thread2 = new Thread(new Test(ii, CREATE_LIST_2, UPDATE_LIST_2));