Merge branch 'feature/SEARCH-68_update_solr' into develop

Gethin James
2016-09-13 15:32:51 +02:00
14 changed files with 256 additions and 191 deletions

View File

@@ -37,7 +37,6 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.uninverting.UninvertingReader.Type;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.QueryBuilder;
@@ -48,6 +47,7 @@ import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.QParser;
import org.apache.solr.uninverting.UninvertingReader.Type;
/**
* Basic behaviour filched from TextField
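Reviewer note: the import move above reflects the Type enum now being provided by solr-core under org.apache.solr.uninverting (the Solr fork of Lucene's uninverting support) in the 6.2 line this commit upgrades to. A minimal sketch of referring to the relocated enum, assuming solr-core 6.2 on the classpath; the class name below is illustrative, not from this commit:

import org.apache.solr.uninverting.UninvertingReader.Type;

public class UninvertingTypeExample
{
    public static void main(String[] args)
    {
        // The doc-values types a FieldType typically reports via getUninversionType(SchemaField).
        Type singleValued = Type.SORTED;
        Type multiValued = Type.SORTED_SET_BINARY;
        System.out.println(singleValued + " / " + multiValued);
    }
}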

View File

@@ -36,9 +36,8 @@ public class ContextAwareQuery extends Query
{
protected final static Logger log = LoggerFactory.getLogger(ContextAwareQuery.class);
private Query luceneQuery;
private SearchParameters searchParameters;
private final Query luceneQuery;
private final SearchParameters searchParameters;
/**
* @param luceneQuery Query
@@ -64,7 +63,7 @@ public class ContextAwareQuery extends Query
* @param searcher IndexSearcher
* @return Weight
* @throws IOException
* @see org.apache.lucene.search.Query#createWeight(org.apache.lucene.search.IndexSearcher)
* @see org.apache.lucene.search.Query#createWeight(IndexSearcher, boolean)
*/
public Weight createWeight(IndexSearcher searcher, boolean needsScore) throws IOException
{
@@ -83,48 +82,24 @@ public class ContextAwareQuery extends Query
{
return luceneQuery.rewrite(reader);
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode()
{
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((luceneQuery == null) ? 0 : luceneQuery.hashCode());
result = prime * result + ((searchParameters == null) ? 0 : searchParameters.hashCode());
return result;
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ContextAwareQuery)) return false;
ContextAwareQuery that = (ContextAwareQuery) o;
if (luceneQuery != null ? !luceneQuery.equals(that.luceneQuery) : that.luceneQuery != null) return false;
return searchParameters != null ? searchParameters.equals(that.searchParameters) : that.searchParameters == null;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
ContextAwareQuery other = (ContextAwareQuery) obj;
if (luceneQuery == null)
{
if (other.luceneQuery != null)
return false;
}
else if (!luceneQuery.equals(other.luceneQuery))
return false;
if (searchParameters == null)
{
if (other.searchParameters != null)
return false;
}
else if (!searchParameters.equals(other.searchParameters))
return false;
return true;
public int hashCode() {
int result = luceneQuery != null ? luceneQuery.hashCode() : 0;
result = 31 * result + (searchParameters != null ? searchParameters.hashCode() : 0);
return result;
}
public Query getLuceneQuery()
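Reviewer note on the equals()/hashCode() rewrites in this and the following files: with the Lucene 6 API the Query base class no longer carries a boost, so the old super.equals()/super.hashCode() calls no longer contribute anything useful, and each subclass now defines its own identity over its final fields. A hedged sketch of the shape being used, in plain Java with an illustrative class and field name:

import java.util.Objects;

public final class ExampleQuery
{
    private final String term;

    public ExampleQuery(String term)
    {
        this.term = Objects.requireNonNull(term, "term cannot be null");
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (!(o instanceof ExampleQuery)) return false;
        return term.equals(((ExampleQuery) o).term);
    }

    @Override
    public int hashCode()
    {
        // Including the class keeps hashes of different query types distinct.
        return Objects.hash(getClass(), term);
    }

    public static void main(String[] args)
    {
        System.out.println(new ExampleQuery("alfresco").equals(new ExampleQuery("alfresco"))); // true
    }
}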

View File

@@ -40,7 +40,7 @@ import org.apache.lucene.analysis.minhash.MinHashFilter;
import org.apache.lucene.analysis.minhash.MinHashFilter.LongPair;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.CharArraySet;
/**
* @author Andy
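Aside on the import change above: from Lucene 6.2 on, CharArraySet ships in lucene-core under org.apache.lucene.analysis rather than org.apache.lucene.analysis.util. A small usage sketch, assuming lucene-core 6.2 on the classpath:

import java.util.Arrays;
import org.apache.lucene.analysis.CharArraySet;

public class StopSetExample
{
    public static void main(String[] args)
    {
        // Second constructor argument: ignore case when matching.
        CharArraySet stopWords = new CharArraySet(Arrays.asList("the", "a", "of"), true);
        System.out.println(stopWords.contains("The")); // true
    }
}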

View File

@@ -29,7 +29,7 @@ import org.apache.lucene.search.Weight;
*/
public abstract class AbstractAuthorityQuery extends Query
{
protected String authority;
protected final String authority;
/**
* Construct with authority.
@@ -38,10 +38,10 @@ public abstract class AbstractAuthorityQuery extends Query
*/
public AbstractAuthorityQuery(String authority)
{
if (authority == null) throw new IllegalStateException("authority cannot be null");
this.authority = authority;
}
@Override
public abstract Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException;
@@ -49,33 +49,20 @@ public abstract class AbstractAuthorityQuery extends Query
{
return toString();
}
@Override
public int hashCode()
{
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((authority == null) ? 0 : authority.hashCode());
return result;
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof AbstractAuthorityQuery)) return false;
AbstractAuthorityQuery that = (AbstractAuthorityQuery) o;
return authority.equals(that.authority);
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
AbstractAuthorityQuery other = (AbstractAuthorityQuery) obj;
if (authority == null)
{
if (other.authority != null)
return false;
}
else if (!authority.equals(other.authority))
return false;
return true;
public int hashCode() {
return authority.hashCode();
}
}
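Minor aside on the null guards added to these constructors: an equivalent and slightly more conventional form uses java.util.Objects, which throws NullPointerException rather than IllegalStateException; the choice of exception is a project style decision, not something the Solr upgrade requires. Illustrative sketch only:

import java.util.Objects;

public class GuardExample
{
    private final String authority;

    public GuardExample(String authority)
    {
        this.authority = Objects.requireNonNull(authority, "authority cannot be null");
    }

    public String getAuthority()
    {
        return authority;
    }

    public static void main(String[] args)
    {
        System.out.println(new GuardExample("GROUP_EVERYONE").getAuthority());
    }
}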

View File

@@ -43,7 +43,7 @@ import org.apache.solr.search.SolrIndexSearcher;
*/
public abstract class AbstractAuthoritySetQuery extends Query
{
protected String authorities;
protected final String authorities;
/**
* Construct with authorities.
@@ -53,6 +53,7 @@ public abstract class AbstractAuthoritySetQuery extends Query
public AbstractAuthoritySetQuery(String authorities)
{
super();
if (authorities == null) throw new IllegalStateException("authorities cannot be null");
this.authorities = authorities;
}
@@ -64,34 +65,21 @@ public abstract class AbstractAuthoritySetQuery extends Query
{
return toString();
}
@Override
public int hashCode()
{
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((authorities == null) ? 0 : authorities.hashCode());
return result;
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof AbstractAuthoritySetQuery)) return false;
AbstractAuthoritySetQuery that = (AbstractAuthoritySetQuery) o;
return authorities.equals(that.authorities);
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
AbstractAuthoritySetQuery other = (AbstractAuthoritySetQuery) obj;
if (authorities == null)
{
if (other.authorities != null)
return false;
}
else if (!authorities.equals(other.authorities))
return false;
return true;
public int hashCode() {
return authorities.hashCode();
}
protected HybridBitSet getACLSet(String[] auths, String field, SolrIndexSearcher searcher) throws IOException

View File

@@ -36,10 +36,11 @@ import org.apache.lucene.util.FixedBitSet;
public class BitsFilter extends Filter {
private List<FixedBitSet> bitSets;
private final List<FixedBitSet> bitSets;
public BitsFilter(List<FixedBitSet> bitSets)
{
if (bitSets == null) throw new IllegalStateException("bitSets cannot be null");
this.bitSets = bitSets;
}
@@ -77,4 +78,21 @@ public class BitsFilter extends Filter {
public DocIdSet getDocIdSet(LeafReaderContext context, Bits bits) {
return BitsFilteredDocIdSet.wrap(new BitDocIdSet(bitSets.get(context.ord)), bits);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof BitsFilter)) return false;
BitsFilter that = (BitsFilter) o;
if (!bitSets.equals(that.bitSets)) return false;
return true;
}
@Override
public int hashCode() {
return bitSets.hashCode();
}
}

View File

@@ -39,28 +39,34 @@ import org.apache.solr.search.PostFilter;
public class PostFilterQuery extends Query implements PostFilter
{
private int cost;
private Query query;
private final Query query;
public PostFilterQuery(int cost, Query query)
{
this.cost = cost;
if (query == null) throw new IllegalStateException("query cannot be null");
this.query = query;
}
public int hashcode()
{
return query.hashCode();
@Override
public int hashCode() {
// Cost is mutable (setCost) and is excluded from equals() below, so it is
// excluded here as well to keep the equals/hashCode contract consistent.
return query.hashCode();
}
public boolean equals(Object o)
{
if(o instanceof PostFilterQuery)
{
PostFilterQuery p = (PostFilterQuery)o;
return query.equals(p.query);
}
// The previous method was declared as hashcode() (lowercase c), so it never actually overrode Object.hashCode().
return false;
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof PostFilterQuery)) return false;
PostFilterQuery that = (PostFilterQuery) o;
// Cost is mutable, so it is deliberately not part of equality.
return query.equals(that.query);
}
public int getCost()
@@ -68,6 +74,7 @@ public class PostFilterQuery extends Query implements PostFilter
return cost;
}
@Override
public void setCost(int cost)
{
this.cost = cost;
@@ -78,6 +85,7 @@ public class PostFilterQuery extends Query implements PostFilter
return false;
}
@Override
public void setCache(boolean cache)
{
@@ -88,6 +96,7 @@ public class PostFilterQuery extends Query implements PostFilter
return false;
}
@Override
public void setCacheSep(boolean cacheSep)
{

View File

@@ -34,6 +34,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TimeZone;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.dictionary.IndexTokenisationMode;
@@ -468,8 +469,8 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
return spanQueryBuilder(field, first, last, slop, inOrder);
} else
{
BytesRef firstBytes = analyzeMultitermTerm(field, first, getAnalyzer());
BytesRef lastBytes = analyzeMultitermTerm(field, last, getAnalyzer());
BytesRef firstBytes = analyzeMultitermTerm(field, first);
BytesRef lastBytes = analyzeMultitermTerm(field, last);
SpanQuery firstTerm = new SpanTermQuery(new Term(field, firstBytes));
SpanQuery lastTerm = new SpanTermQuery(new Term(field, lastBytes));
return new SpanNearQuery(new SpanQuery[]
@@ -2833,7 +2834,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String start = null;
try
{
analyzeMultitermTerm(FIELD_CASCADETX, part1, null);
analyzeMultitermTerm(FIELD_CASCADETX, part1);
start = part1;
} catch (Exception e)
{
@@ -2842,7 +2843,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String end = null;
try
{
analyzeMultitermTerm(FIELD_CASCADETX, part2, null);
analyzeMultitermTerm(FIELD_CASCADETX, part2);
end = part2;
} catch (Exception e)
{
@@ -2973,7 +2974,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String start = null;
try
{
analyzeMultitermTerm(solrField, part1, null);
analyzeMultitermTerm(solrField, part1);
start = part1;
} catch (Exception e)
{
@@ -2982,7 +2983,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String end = null;
try
{
analyzeMultitermTerm(solrField, part2, null);
analyzeMultitermTerm(solrField, part2);
end = part2;
} catch (Exception e)
{
@@ -3041,7 +3042,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String start = null;
try
{
analyzeMultitermTerm(solrField, part1, null);
analyzeMultitermTerm(solrField, part1);
start = part1;
} catch (Exception e)
{
@@ -3050,7 +3051,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
String end = null;
try
{
analyzeMultitermTerm(solrField, part2, null);
analyzeMultitermTerm(solrField, part2);
end = part2;
} catch (Exception e)
{
@@ -4766,7 +4767,8 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
*/
protected Query createAuthorityQuery(String queryText) throws ParseException
{
return new SolrAuthorityQuery(queryText);
//return new SolrAuthorityQuery(queryText);
return new SolrAuthoritySetQuery(","+queryText);
}
// TODO: correct field names
@@ -5000,8 +5002,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
private void addLocaleSpecificMLOrTextAttribute(PropertyDefinition pDef, String queryText, SubQuery subQueryBuilder,
AnalysisMode analysisMode, LuceneFunction luceneFunction, Builder booleanQuery, Locale locale,
String textFieldName, IndexTokenisationMode tokenisationMode,
IndexTokenisationMode preferredTokenisationMode) throws ParseException
{
IndexTokenisationMode preferredTokenisationMode) throws ParseException {
FieldInstance fieldInstance = getFieldInstance(textFieldName, pDef, locale, preferredTokenisationMode);
StringBuilder builder = new StringBuilder(queryText.length() + 10);
@@ -5177,8 +5178,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
protected void addTextSpanQuery(String field, PropertyDefinition pDef, String first, String last, int slop,
boolean inOrder, String expandedFieldName, IndexTokenisationMode tokenisationMode, Builder booleanQuery,
Locale locale)
{
Locale locale) {
addMLTextOrTextSpanQuery(field, pDef, first, last, slop, inOrder, expandedFieldName, tokenisationMode,
booleanQuery, locale);
}
@@ -5310,8 +5310,7 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
protected void addMLTextSpanQuery(String field, PropertyDefinition pDef, String first, String last, int slop,
boolean inOrder, String expandedFieldName, PropertyDefinition propertyDef,
IndexTokenisationMode tokenisationMode, Builder booleanQuery, Locale locale)
{
IndexTokenisationMode tokenisationMode, Builder booleanQuery, Locale locale) {
addMLTextOrTextSpanQuery(field, pDef, first, last, slop, inOrder, expandedFieldName, tokenisationMode,
booleanQuery, locale);
}
@@ -5390,4 +5389,54 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
{
return new SolrDenySetQuery(queryText);
}
private BytesRef analyzeMultitermTerm(String field, String part) {
return analyzeMultitermTerm(field, part, getAnalyzer());
}
protected BytesRef analyzeMultitermTerm(String field, String part, Analyzer analyzerIn) {
if (analyzerIn == null) analyzerIn = getAnalyzer();
try (TokenStream source = analyzerIn.tokenStream(field, part)) {
source.reset();
TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
if (!source.incrementToken())
throw new IllegalArgumentException("analyzer returned no terms for multiTerm term: " + part);
BytesRef bytes = BytesRef.deepCopyOf(termAtt.getBytesRef());
if (source.incrementToken())
throw new IllegalArgumentException("analyzer returned too many terms for multiTerm term: " + part);
source.end();
return bytes;
} catch (IOException e) {
throw new RuntimeException("Error analyzing multiTerm term: " + part, e);
}
}
private boolean analyzeRangeTerms = true;
protected Query newRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
final BytesRef start;
final BytesRef end;
if (part1 == null) {
start = null;
} else {
start = analyzeRangeTerms ? analyzeMultitermTerm(field, part1) : new BytesRef(part1);
}
if (part2 == null) {
end = null;
} else {
end = analyzeRangeTerms ? analyzeMultitermTerm(field, part2) : new BytesRef(part2);
}
final TermRangeQuery query = new TermRangeQuery(field, start, end, startInclusive, endInclusive);
query.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
return query;
}
}
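For context on the analyzeMultitermTerm/newRangeQuery additions above: the parser now analyzes each range endpoint to a BytesRef and builds a constant-score TermRangeQuery itself. A standalone sketch of the same construction, assuming lucene-core 6.x on the classpath; the field name and values are illustrative:

import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;

public class RangeQueryExample
{
    public static void main(String[] args)
    {
        TermRangeQuery range = new TermRangeQuery("created",
                new BytesRef("2016-01-01"), new BytesRef("2016-12-31"),
                true, true); // inclusive on both ends
        range.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
        System.out.println(range);
    }
}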

View File

@@ -20,10 +20,14 @@ package org.alfresco.solr.query;
import java.io.IOException;
import org.alfresco.solr.cache.CacheConstants;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.WrappedQuery;
/**
* Decorator that executes a SolrPathQuery and returns cached results where possible.
@@ -32,23 +36,40 @@ import org.apache.solr.search.SolrIndexSearcher;
*/
public class SolrCachingPathQuery extends Query
{
SolrPathQuery pathQuery;
final SolrPathQuery pathQuery;
public SolrCachingPathQuery(SolrPathQuery pathQuery)
{
if (pathQuery == null) throw new IllegalStateException("pathQuery cannot be null");
this.pathQuery = pathQuery;
}
/*
* @see org.apache.lucene.search.Query#createWeight(IndexSearcher, boolean)
*/
public Weight createWeight(IndexSearcher searcher, boolean requiresScore) throws IOException
public Weight createWeight(IndexSearcher indexSearcher, boolean requiresScore) throws IOException
{
if(!(searcher instanceof SolrIndexSearcher))
SolrIndexSearcher searcher = null;
if(!(indexSearcher instanceof SolrIndexSearcher))
{
throw new IllegalStateException("Must have a SolrIndexSearcher");
}
return new SolrCachingPathWeight(this, (SolrIndexSearcher)searcher);
else
{
searcher = (SolrIndexSearcher)indexSearcher;
}
DocSet results = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_PATH_CACHE, pathQuery);
if (results == null)
{
// Cache miss: get path query results and cache them
WrappedQuery wrapped = new WrappedQuery(pathQuery);
wrapped.setCache(false);
results = searcher.getDocSet(wrapped);
searcher.cacheInsert(CacheConstants.ALFRESCO_PATH_CACHE, pathQuery, results);
}
return new ConstantScoreQuery(results.getTopFilter()).createWeight(searcher, false);
}
/*
@@ -62,34 +83,18 @@ public class SolrCachingPathQuery extends Query
return stringBuilder.toString();
}
@Override
public int hashCode()
{
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((pathQuery == null) ? 0 : pathQuery.hashCode());
return result;
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof SolrCachingPathQuery)) return false;
SolrCachingPathQuery that = (SolrCachingPathQuery) o;
return pathQuery.equals(that.pathQuery);
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
SolrCachingPathQuery other = (SolrCachingPathQuery) obj;
if (pathQuery == null)
{
if (other.pathQuery != null)
return false;
}
else if (!pathQuery.equals(other.pathQuery))
return false;
return true;
public int hashCode() {
return pathQuery.hashCode();
}
}
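Note on the createWeight change above: the cached path results are now surfaced through a ConstantScoreQuery, so every matching document gets the same score. The equivalent wrapping for an arbitrary query, as a minimal sketch with an illustrative field and term:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class ConstantScoreExample
{
    public static void main(String[] args)
    {
        Query inner = new TermQuery(new Term("OWNER", "admin"));
        Query constantScore = new ConstantScoreQuery(inner); // all hits score 1.0f
        System.out.println(constantScore);
    }
}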

View File

@@ -19,15 +19,16 @@
package org.alfresco.solr.query;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.service.cmr.security.AuthorityType;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.*;
import org.apache.lucene.util.FixedBitSet;
import org.apache.solr.search.SolrIndexSearcher;
/**
@@ -48,7 +49,9 @@ public class SolrOwnerQuery extends AbstractAuthorityQuery
{
throw new IllegalStateException("Must have a SolrIndexSearcher");
}
return new SolrOwnerQueryWeight((SolrIndexSearcher)searcher, this, authority);
BitsFilter ownerFilter = getOwnerFilter(authority, (SolrIndexSearcher)searcher);
return new ConstantScoreQuery(ownerFilter).createWeight(searcher, false);
}
@Override
@@ -59,24 +62,53 @@ public class SolrOwnerQuery extends AbstractAuthorityQuery
stringBuilder.append(authority);
return stringBuilder.toString();
}
private class SolrOwnerQueryWeight extends AbstractAuthorityQueryWeight
private BitsFilter getOwnerFilter(String owner, SolrIndexSearcher searcher) throws IOException
{
public SolrOwnerQueryWeight(SolrIndexSearcher searcher, Query query, String authority) throws IOException
Query query = new TermQuery(new Term(QueryConstants.FIELD_OWNER, owner));
BitsFilterCollector collector = new BitsFilterCollector(searcher.getTopReaderContext().leaves().size());
searcher.search(query, collector);
return collector.getBitsFilter();
}
class BitsFilterCollector implements Collector, LeafCollector
{
private List<FixedBitSet> sets;
private FixedBitSet set;
public BitsFilterCollector(int leafCount)
{
super(searcher, false, query, QueryConstants.FIELD_OWNER, authority);
this.sets = new ArrayList<FixedBitSet>(leafCount);
}
public BitsFilter getBitsFilter() {
return new BitsFilter(sets);
}
public boolean acceptsDocsOutOfOrder() {
return false;
}
public void setScorer(Scorer scorer) {
}
public void collect(int doc) {
set.set(doc);
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException
{
return SolrOwnerScorer.createOwnerScorer(this, context, searcher, SolrOwnerQuery.this.authority);
public LeafCollector getLeafCollector(LeafReaderContext context)
throws IOException {
set = new FixedBitSet(context.reader().maxDoc());
sets.add(set);
return this;
}
@Override
public void extractTerms(Set<Term> terms)
{
terms.add(new Term(QueryConstants.FIELD_OWNER, authority));
}
@Override
public boolean needsScores() {
// Scores are not needed: the collector only records which documents match.
return false;
}
}
}
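On the BitsFilterCollector introduced above: Lucene's SimpleCollector base class already supplies the Collector/LeafCollector plumbing, so the same per-segment bitset collection could be written more compactly. A hedged sketch (not this commit's code), assuming lucene-core 6.x:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.util.FixedBitSet;

public class BitSetCollector extends SimpleCollector
{
    private final List<FixedBitSet> sets = new ArrayList<>();
    private FixedBitSet current;

    @Override
    protected void doSetNextReader(LeafReaderContext context) throws IOException
    {
        // One bitset per segment, sized to that segment's maxDoc.
        current = new FixedBitSet(context.reader().maxDoc());
        sets.add(current);
    }

    @Override
    public void collect(int doc)
    {
        current.set(doc);
    }

    @Override
    public boolean needsScores()
    {
        return false;
    }

    public List<FixedBitSet> getSets()
    {
        return sets;
    }
}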

View File

@@ -286,14 +286,11 @@ public class SolrPathQuery extends Query
}
@Override
public int hashCode()
{
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((pathField == null) ? 0 : pathField.hashCode());
result = prime * result + ((pathStructuredFieldPositions == null) ? 0 : pathStructuredFieldPositions.hashCode());
result = prime * result + (repeats ? 1231 : 1237);
result = prime * result + unitSize;
public int hashCode() {
int result = pathField != null ? pathField.hashCode() : 0;
result = 31 * result + unitSize;
result = 31 * result + (pathStructuredFieldPositions != null ? pathStructuredFieldPositions.hashCode() : 0);
result = 31 * result + (repeats ? 1 : 0);
return result;
}
@@ -302,8 +299,6 @@ public class SolrPathQuery extends Query
{
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
SolrPathQuery other = (SolrPathQuery) obj;

View File

@@ -88,7 +88,7 @@ public class AlfrescoSolrHighlighter extends DefaultSolrHighlighter implements
*
* @param query
* The current Query
* @param fieldName
* @param requestFieldname
* The name of the field
* @param request
* The current SolrQueryRequest
@@ -115,7 +115,7 @@ public class AlfrescoSolrHighlighter extends DefaultSolrHighlighter implements
* The current query
* @param tokenStream
* document text CachingTokenStream
* @param fieldName
* @param requestFieldname
* The name of the field
* @param request
* The SolrQueryRequest
@@ -156,7 +156,7 @@ public class AlfrescoSolrHighlighter extends DefaultSolrHighlighter implements
*
* @param query
* The current query
* @param fieldName
* @param requestFieldname
* The name of the field
* @param request
* The SolrQueryRequest
@@ -264,8 +264,12 @@ public class AlfrescoSolrHighlighter extends DefaultSolrHighlighter implements
SolrHighlighter.DEFAULT_PHRASE_LIMIT));
fvhFieldQuery = fvh.getFieldQuery(query, reader);
}
fieldHighlights = null;
FvhContainer fvhContainer = new FvhContainer(fvh, fvhFieldQuery);
fieldHighlights = doHighlightingByFastVectorHighlighter(
doc, docId, schemaField, fvh, fvhFieldQuery,
doc, docId, schemaField, fvhContainer,
reader, req);
} else { // standard/default highlighter
fieldHighlights = doHighlightingByHighlighter(doc, docId,

View File

@@ -20,8 +20,10 @@ package org.alfresco.solr.query;
import java.io.IOException;
import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.impl.parsers.FTSQueryParser;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.util.Pair;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.search.Query;
@@ -46,15 +48,16 @@ public class AuthQueryTest extends AuthDataLoad
RefCounted<SolrIndexSearcher> refCounted = null;
try
{
assertFTSQuery("TEXT:\"Test\"", count);
assertFTSQuery("TEXT:\"Test\" ", count);
assertFTSQuery("TEXT:\"doc\"", count);
assertFTSQuery("TEXT:\"number\"", count);
//Assert that root, base folder, folder-0 and 100 documents are returned.
assertFTSQuery("AUTHORITY:\"GROUP_EVERYONE\"", 103);
assertFTSQuery("AUTHORITY:\"GROUP_EVERYONE\" AND "+QueryConstants.FIELD_DOC_TYPE+":"+SolrInformationServer.DOC_TYPE_NODE, 103);
//Test data load adds lots of AUTHORITY readers by looping count -1
assertFTSQuery("AUTHORITY:\"READER-1000\"", 100);
assertFTSQuery("AUTHORITY:\"READER-902\"", 2);
assertFTSQuery("AUTHORITY:\"READER-901\"", 1);
assertFTSQuery("AUTHORITY:\"READER-1000\" AND "+QueryConstants.FIELD_DOC_TYPE+":"+SolrInformationServer.DOC_TYPE_NODE, 100);
assertFTSQuery("AUTHORITY:\"READER-902\" AND "+QueryConstants.FIELD_DOC_TYPE+":"+SolrInformationServer.DOC_TYPE_NODE, 2);
assertFTSQuery("AUTHORITY:\"READER-901\" AND "+QueryConstants.FIELD_DOC_TYPE+":"+SolrInformationServer.DOC_TYPE_NODE, 1);
//Grouping boundary test that checks ... Andy can explain.
buildAndRunAuthQuery(count, 8);
buildAndRunAuthQuery(count, 9);
@@ -106,6 +109,7 @@ public class AuthQueryTest extends AuthDataLoad
searchParameters.setQuery(queryString);
Query query = dataModel.getFTSQuery(new Pair<SearchParameters, Boolean>(searchParameters, Boolean.FALSE),
solrQueryRequest, FTSQueryParser.RerankPhase.SINGLE_PASS);
System.out.println("##################### Query:"+query);
TopDocs docs = solrIndexSearcher.search(query, count * 2 + 10);
Assert.assertEquals(count, docs.totalHits);

View File

@@ -10,10 +10,9 @@
<version>6.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Solr parent</name>
<properties>
<solr.version>6.0.1</solr.version>
<alfresco-solr.version>5.2-SEARCH-API-SNAPSHOT</alfresco-solr.version>
<solr.version>6.2.0</solr.version>
<alfresco-solr.version>5.2.1-SNAPSHOT</alfresco-solr.version>
</properties>
<modules>