Mirror of https://github.com/Alfresco/SearchServices.git (synced 2025-09-10 14:11:25 +00:00)
Merge master into feature/SEARCH-1745_WrapNPEForShardKey.
@@ -75,13 +75,13 @@
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
-            <version>1.7.27</version>
+            <version>1.7.28</version>
         </dependency>
 
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
-            <version>1.7.27</version>
+            <version>1.7.28</version>
         </dependency>
 
         <dependency>
@@ -951,7 +951,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
         {
             if (maxNodeId >= midpoint)
             {
-                if(density >= 1 || density == 0)
+                if(density >= 1)
                 {
                     //This is a fully dense shard. I'm not sure if it's possible to have more nodes on the shard
                     //than the offset, but if it does happen don't expand.
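Note on the guard change above: under the old condition, a shard reporting density == 0 (no nodes at all) was treated exactly like a fully dense one and was never expanded. A minimal sketch of the effect, with hypothetical values (reading density as nodes per available offset is an assumption about the surrounding code):

    class DensityGuardDemo {
        public static void main(String[] args) {
            double density = 0.0; // hypothetical empty shard
            boolean blockedOld = density >= 1 || density == 0; // true: empty shard never expands
            boolean blockedNew = density >= 1;                 // false: empty shard may expand
            System.out.println(blockedOld + " vs " + blockedNew);
        }
    }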
@@ -1195,7 +1195,8 @@ public class AlfrescoSolrDataModel implements QueryConstants
 
     public void removeModel(QName modelQName)
     {
         modelErrors.remove(getM2Model(modelQName).getName());
+        // FIXME: this has no effect. The method should be changed (SEARCH-1482)
         modelErrors.remove(modelQName);
         dictionaryDAO.removeModel(modelQName);
     }
 
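The FIXME above points at a key-type mismatch: if modelErrors is keyed by the model's String name, Map.remove(Object) accepts a QName without complaint but never finds a match. A minimal sketch of the pitfall (the Map<String, String> shape is an assumption):

    import java.util.HashMap;
    import java.util.Map;

    class WrongKeyTypeDemo {
        public static void main(String[] args) {
            Map<String, String> modelErrors = new HashMap<>();
            modelErrors.put("cm:contentmodel", "compilation failed");

            Object modelQName = new Object(); // stand-in for the QName key
            modelErrors.remove(modelQName);   // compiles, but removes nothing
            System.out.println(modelErrors.size()); // still 1
        }
    }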
@@ -957,10 +957,9 @@ public class SolrInformationServer implements InformationServer
         SolrIndexSearcher solrIndexSearcher = refCounted.get();
         coreSummary.add("Searcher", solrIndexSearcher.getStatistics());
         Map<String, SolrInfoMBean> infoRegistry = core.getInfoRegistry();
-        for (Entry<String, SolrInfoMBean> infos : infoRegistry.entrySet())
+        for (String key : infoRegistry.keySet())
         {
-            SolrInfoMBean infoMBean = infos.getValue();
-            String key = infos.getKey();
+            SolrInfoMBean infoMBean = infoRegistry.get(key);
             if (key.equals("/alfresco"))
             {
                 // TODO Do we really need to fixStats in solr4?
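This hunk (and several below) swaps entrySet() iteration for keySet() plus a get() per element. Both forms visit the same pairs; the entrySet() form just avoids one extra hash lookup per iteration. A minimal sketch of the two equivalent styles:

    import java.util.HashMap;
    import java.util.Map;

    class MapIterationDemo {
        public static void main(String[] args) {
            Map<String, Object> registry = new HashMap<>();
            registry.put("/alfresco", new Object());

            // Style used after the merge: one extra lookup per key.
            for (String key : registry.keySet()) {
                System.out.println(key + " -> " + registry.get(key));
            }

            // entrySet() form: key and value come from the same entry.
            for (Map.Entry<String, Object> e : registry.entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }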
@@ -2118,9 +2117,8 @@ public class SolrInformationServer implements InformationServer
     static void addPropertiesToDoc(Map<QName, PropertyValue> properties, boolean isContentIndexedForNode,
             SolrInputDocument newDoc, SolrInputDocument cachedDoc, boolean transformContentFlag)
     {
-        for (Entry<QName, PropertyValue> property : properties.entrySet())
+        for (QName propertyQName : properties.keySet())
         {
-            QName propertyQName = property.getKey();
             newDoc.addField(FIELD_PROPERTIES, propertyQName.toString());
             newDoc.addField(FIELD_PROPERTIES, propertyQName.getPrefixString());
 
@@ -3414,15 +3412,10 @@ public class SolrInformationServer implements InformationServer
             SolrQueryRequest request, UpdateRequestProcessor processor, LinkedHashSet<Long> stack)
             throws AuthenticationException, IOException, JSONException
     {
-
-        // skipDescendantDocsForSpecificAspects is initialised on a synchronised method, so access must also be synchronised
-        synchronized (this)
-        {
-            if ((skipDescendantDocsForSpecificTypes && typesForSkippingDescendantDocs.contains(parentNodeMetaData.getType())) ||
-                (skipDescendantDocsForSpecificAspects && shouldBeIgnoredByAnyAspect(parentNodeMetaData.getAspects())))
-            {
-                return;
-            }
-        }
+        if ((skipDescendantDocsForSpecificTypes && typesForSkippingDescendantDocs.contains(parentNodeMetaData.getType())) ||
+            (skipDescendantDocsForSpecificAspects && shouldBeIgnoredByAnyAspect(parentNodeMetaData.getAspects())))
+        {
+            return;
+        }
 
         Set<Long> childIds = new HashSet<>();
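The deleted synchronized block existed only for visibility: the skip flags are written inside a synchronized method, and without a matching lock (or volatile fields) a reader thread is not guaranteed to see those writes. A minimal sketch of the two read strategies (field names are illustrative):

    class VisibilityDemo {
        private boolean skipForAspects;            // plain field
        private volatile boolean skipForAspectsV;  // volatile alternative

        synchronized void init() {   // writer holds the monitor
            skipForAspects = true;
            skipForAspectsV = true;
        }

        boolean readLocked() {
            synchronized (this) {    // pairs with init(): guaranteed to see the write
                return skipForAspects;
            }
        }

        boolean readUnlocked() {
            return skipForAspects;   // may observe a stale value on another thread
        }
    }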
@@ -41,7 +41,6 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
@@ -473,9 +472,8 @@ public class AsyncBuildSuggestComponent extends SearchComponent implements SolrC
     @Override
     public long ramBytesUsed() {
         long sizeInBytes = 0;
-        for (Entry<String, SuggesterCache> suggester : suggesters.entrySet())
-        {
-            sizeInBytes += suggester.getValue().get(ASYNC_CACHE_KEY).ramBytesUsed();
+        for (String key : suggesters.keySet()) {
+            sizeInBytes += suggesters.get(key).get(ASYNC_CACHE_KEY).ramBytesUsed();
         }
         return sizeInBytes;
     }
@@ -91,7 +91,7 @@ public class RewriteFacetParametersComponent extends SearchComponent
         String rows = params.get("rows");
         if(rows != null && !rows.isEmpty())
         {
-            Integer row = Integer.valueOf(rows);
+            Integer row = new Integer(rows);
             // Avoid +1 in SOLR code which produces null:java.lang.NegativeArraySizeException at org.apache.lucene.util.PriorityQueue.<init>(PriorityQueue.java:56)
             if(row > 1000000)
             {
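Both forms above parse the string into a boxed Integer; Integer.valueOf can return cached instances for small values and survived later JDK deprecations, while new Integer(...) always allocates and has been deprecated since Java 9. A small demonstration:

    class BoxingDemo {
        public static void main(String[] args) {
            Integer a = Integer.valueOf("100");
            Integer b = Integer.valueOf("100");
            System.out.println(a == b);      // true: -128..127 come from the Integer cache

            Integer c = new Integer("100");  // always a fresh object
            Integer d = new Integer("100");
            System.out.println(c == d);      // false
            System.out.println(c.equals(d)); // true: compare boxed values with equals()
        }
    }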
@@ -19,6 +19,7 @@
 package org.alfresco.solr.component;
 
 import java.io.IOException;
+import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -52,9 +53,9 @@ public class TempFileWarningLogger
             log.debug("Looking for temp files matching " + glob + " in directory " + dir);
         }
 
-        try
+        try(DirectoryStream<Path> stream = Files.newDirectoryStream(dir, glob))
         {
-            for (Path file : Files.newDirectoryStream(dir, glob))
+            for (Path file : stream)
             {
                 if (log.isDebugEnabled())
                 {
@@ -73,9 +74,9 @@ public class TempFileWarningLogger
 
     public void removeFiles()
     {
-        try
+        try(DirectoryStream<Path> stream = Files.newDirectoryStream(dir, glob))
         {
-            for (Path file : Files.newDirectoryStream(dir, glob))
+            for (Path file : stream)
             {
                 file.toFile().delete();
             }
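The two hunks above fix the same resource leak: Files.newDirectoryStream returns a DirectoryStream backed by an open OS directory handle, and a plain for-each over the inline call never closes it. try-with-resources closes the handle deterministically. A self-contained sketch of the fixed pattern:

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    class DirectoryStreamDemo {
        public static void main(String[] args) throws IOException {
            Path dir = Paths.get(System.getProperty("java.io.tmpdir"));
            // Leaky: for (Path p : Files.newDirectoryStream(dir, "*.tmp")) { ... }
            // Fixed: the stream is closed when the try block exits.
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*.tmp")) {
                for (Path p : stream) {
                    System.out.println(p);
                }
            }
        }
    }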
@@ -47,7 +47,7 @@ public abstract class AbstractSolrCachingScorer extends Scorer
 
     static {
         for(int i = 0; i < cache.length; i++)
-            cache[i] = Long.valueOf(i);
+            cache[i] = new Long(i);
         }
     }
 
@@ -20,7 +20,6 @@ package org.alfresco.solr.query;
 
 import java.io.IOException;
 import java.util.HashMap;
-import java.util.Map.Entry;
 
 import org.alfresco.solr.AlfrescoSolrDataModel;
 import org.alfresco.solr.AlfrescoSolrDataModel.FieldUse;
@@ -112,9 +111,10 @@ public class MimetypeGroupingCollector extends DelegatingCollector
             rb.rsp.add("analytics", analytics);
             NamedList<Object> fieldCounts = new NamedList<>();
             analytics.add("mimetype()", fieldCounts);
-            for (Entry<String, Counter> counter : counters.entrySet())
+            for(String key : counters.keySet())
             {
-                fieldCounts.add(counter.getKey(), counter.getValue().get());
+                Counter counter = counters.get(key);
+                fieldCounts.add(key, counter.get());
             }
 
             if(this.delegate instanceof DelegatingCollector) {
@@ -3301,35 +3301,44 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
 
     protected String getToken(String field, String value, AnalysisMode analysisMode) throws ParseException
     {
-
-        TokenStream source = getAnalyzer().tokenStream(field, new StringReader(value));
-
-        CharTermAttribute cta = source.getAttribute(CharTermAttribute.class);
-        OffsetAttribute offsetAtt = source.getAttribute(OffsetAttribute.class);
-        TypeAttribute typeAtt = null;
-        if (source.hasAttribute(TypeAttribute.class))
-        {
-            typeAtt = source.getAttribute(TypeAttribute.class);
-        }
-        PositionIncrementAttribute posIncAtt = null;
-        if (source.hasAttribute(PositionIncrementAttribute.class))
-        {
-            posIncAtt = source.getAttribute(PositionIncrementAttribute.class);
-        }
-        PackedTokenAttributeImpl token = new PackedTokenAttributeImpl();
-        token.setEmpty().copyBuffer(cta.buffer(), 0, cta.length());
-        token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
-        if (typeAtt != null)
-        {
-            token.setType(typeAtt.type());
-        }
-        if (posIncAtt != null)
-        {
-            token.setPositionIncrement(posIncAtt.getPositionIncrement());
-        }
-
-        return token.toString();
+        try (TokenStream source = getAnalyzer().tokenStream(field, new StringReader(value)))
+        {
+            String tokenised = null;
+
+            while (source.incrementToken())
+            {
+                CharTermAttribute cta = source.getAttribute(CharTermAttribute.class);
+                OffsetAttribute offsetAtt = source.getAttribute(OffsetAttribute.class);
+                TypeAttribute typeAtt = null;
+                if (source.hasAttribute(TypeAttribute.class))
+                {
+                    typeAtt = source.getAttribute(TypeAttribute.class);
+                }
+                PositionIncrementAttribute posIncAtt = null;
+                if (source.hasAttribute(PositionIncrementAttribute.class))
+                {
+                    posIncAtt = source.getAttribute(PositionIncrementAttribute.class);
+                }
+                PackedTokenAttributeImpl token = new PackedTokenAttributeImpl();
+                token.setEmpty().copyBuffer(cta.buffer(), 0, cta.length());
+                token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
+                if (typeAtt != null)
+                {
+                    token.setType(typeAtt.type());
+                }
+                if (posIncAtt != null)
+                {
+                    token.setPositionIncrement(posIncAtt.getPositionIncrement());
+                }
+
+                tokenised = token.toString();
+            }
+            return tokenised;
+        } catch (IOException e)
+        {
+            throw new ParseException("IO" + e.getMessage());
+        }
 
     }
 
     @Override
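Two things change in getToken above: the stream is now closed via try-with-resources, and tokens are pulled with incrementToken(), without which the attribute objects never receive data; the loop keeps the last token's text. A minimal sketch of the standard consumption cycle (StandardAnalyzer and the reset()/end() calls follow the Lucene TokenStream contract and are not shown in the hunk itself):

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    class TokenStreamDemo {
        public static void main(String[] args) throws IOException {
            Analyzer analyzer = new StandardAnalyzer();
            String last = null;
            try (TokenStream source = analyzer.tokenStream("content", "quick brown fox")) {
                CharTermAttribute term = source.addAttribute(CharTermAttribute.class);
                source.reset();                    // required before the first incrementToken()
                while (source.incrementToken()) {  // attributes are populated per token
                    last = term.toString();
                }
                source.end();
            }
            System.out.println(last); // "fox": the last token wins, as in getToken
        }
    }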
@@ -5463,13 +5472,11 @@ public class Solr4QueryParser extends QueryParser implements QueryConstants
     }
 
     protected BytesRef analyzeMultitermTerm(String field, String part, Analyzer analyzerIn) {
         if (analyzerIn == null) analyzerIn = getAnalyzer();
 
-        try
-        {
-            TokenStream source = analyzerIn.tokenStream(field, part);
+        try (TokenStream source = analyzerIn.tokenStream(field, part)) {
             source.reset();
 
             TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
 
             if (!source.incrementToken())
@@ -55,13 +55,6 @@ public class DateQuarterRouter implements DocRouter
         calendar.setTime(date);
         int month = calendar.get(Calendar.MONTH);
         int year = calendar.get(Calendar.YEAR);
-
-        // Avoid using Math.ceil with Integer
-        int countMonths = ((year * 12) + (month+1));
-        int grouping = 3;
-        int ceilGroupInstance = countMonths / grouping + ((countMonths % grouping == 0) ? 0 : 1);
-
-        return ceilGroupInstance % numShards == shardInstance;
-
+        return Math.ceil(((year * 12) + (month+1)) / 3) % numShards == shardInstance;
     }
 }
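The restored single-line version has a subtle truncation: ((year * 12) + (month+1)) / 3 is integer division, so the value handed to Math.ceil is already rounded down and the ceil is a no-op; the deleted block computed a true integer ceiling. A worked example:

    class CeilDemo {
        public static void main(String[] args) {
            int countMonths = 2019 * 12 + 4;  // April 2019 -> 24232
            System.out.println(Math.ceil(countMonths / 3)); // 8077.0: truncated before ceil
            int ceil = countMonths / 3 + ((countMonths % 3 == 0) ? 0 : 1);
            System.out.println(ceil);                       // 8078: the intended ceiling
        }
    }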
@@ -280,7 +280,7 @@ public class AlfrescoSolrClusteringComponent extends SearchComponent implements
             list.add(doc);
 
             if (ids != null) {
-                ids.put(doc, Integer.valueOf(docid));
+                ids.put(doc, new Integer(docid));
             }
         }
         return list;
@@ -356,7 +356,7 @@ public class AlfrescoSolrClusteringComponent extends SearchComponent implements
     /**
      * @return Expose for tests.
      */
-    Map<String, SearchClusteringEngine> getSearchClusteringEnginesView() {
+    Map<String, SearchClusteringEngine> getSearchClusteringEngines() {
         return searchClusteringEnginesView;
     }
 
@@ -73,7 +73,7 @@
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
-            <version>1.7.27</version>
+            <version>1.7.28</version>
         </dependency>
     </dependencies>
 
@@ -26,8 +26,6 @@
 
 package org.alfresco.solr;
 
-import java.util.concurrent.atomic.AtomicInteger;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,21 +64,8 @@ public class TrackerState
     private volatile boolean checkedLastAclTransactionTime = false;
     private volatile boolean checkedLastTransactionTime = false;
 
-    private volatile boolean check = false;
-    // Handle Thread Safe operations
-    private volatile TrackerCyclesInteger trackerCycles;
-    class TrackerCyclesInteger
-    {
-        private AtomicInteger value = new AtomicInteger(0);
-        private void increase()
-        {
-            value.incrementAndGet();
-        }
-        private int getValue()
-        {
-            return value.get();
-        }
-    }
+    private volatile boolean check = false;
+    private volatile int trackerCycles;
     private long timeToStopIndexing;
 
     private long lastGoodChangeSetCommitTimeInIndex;
@@ -252,13 +237,13 @@ public class TrackerState
 
     public int getTrackerCycles()
     {
-        return this.trackerCycles.getValue();
+        return this.trackerCycles;
     }
 
     public synchronized void incrementTrackerCycles()
     {
         log.debug("incrementTrackerCycles from :" + trackerCycles);
-        this.trackerCycles.increase();
+        this.trackerCycles++;
         log.debug("incremented TrackerCycles to :" + trackerCycles);
     }
 
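Replacing the AtomicInteger wrapper with a volatile int stays correct only because every write goes through the synchronized incrementTrackerCycles(): volatile guarantees readers see the latest value, but ++ on a volatile field is still a non-atomic read-modify-write. A sketch of the trade-off:

    import java.util.concurrent.atomic.AtomicInteger;

    class CounterDemo {
        private volatile int cycles;                             // reads need no lock
        private final AtomicInteger cyclesAtomic = new AtomicInteger();

        synchronized void increment() { cycles++; }              // lock serialises the
                                                                 // read-add-write of ++
        int get() { return cycles; }                             // volatile read

        void incrementAtomic() { cyclesAtomic.incrementAndGet(); } // lock-free alternative
    }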
@@ -38,7 +38,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
 import org.alfresco.error.AlfrescoRuntimeException;
@@ -742,7 +741,7 @@ public class SOLRAPIClient
             String localeStr = o.has("locale") && !o.isNull("locale") ? o.getString("locale") : null;
             Locale locale = (o.has("locale") && !o.isNull("locale") ? deserializer.deserializeValue(Locale.class, localeStr) : null);
 
-            long size = o.has("size") && !o.isNull("size") ? o.getLong("size") : 0;
+            Long size = o.has("size") && !o.isNull("size") ? o.getLong("size") : null;
 
             String encoding = o.has("encoding") && !o.isNull("encoding") ? o.getString("encoding") : null;
             String mimetype = o.has("mimetype") && !o.isNull("mimetype") ? o.getString("mimetype") : null;
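Moving from a primitive long with a 0 default back to a boxed Long with a null default keeps "size absent from the JSON" distinguishable from a genuine zero-byte size. In miniature:

    class BoxedSizeDemo {
        public static void main(String[] args) {
            boolean hasSize = false;                // pretend "size" is missing
            long sizePrimitive = hasSize ? 42L : 0; // absent and empty look identical
            Long sizeBoxed = hasSize ? 42L : null;  // null preserves "not provided"
            System.out.println(sizePrimitive + " vs " + sizeBoxed); // 0 vs null
        }
    }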
@@ -1248,13 +1247,17 @@ public class SOLRAPIClient
         this.namespaceDAO = namespaceDAO;
 
-        // add all default converters to this converter
-        for (Entry<Class<?>, Map<Class<?>, Converter<?, ?>>> source : DefaultTypeConverter.INSTANCE.getConverters().entrySet())
-        {
-            for (Entry<Class<?>, Converter<?, ?>> dest : source.getValue().entrySet())
-            {
-                instance.addConverter((Class) source.getKey(), (Class) dest.getKey(), dest.getValue());
-            }
-        }
+        // TODO find a better way of doing this
+        Map<Class<?>, Map<Class<?>, Converter<?,?>>> converters = DefaultTypeConverter.INSTANCE.getConverters();
+        for(Class source : converters.keySet())
+        {
+            Map<Class<?>, Converter<?,?>> converters1 = converters.get(source);
+            for(Class dest : converters1.keySet())
+            {
+                Converter<?,?> converter = converters1.get(dest);
+                instance.addConverter(source, dest, converter);
+            }
+        }
+
 
         // dates
         instance.addConverter(String.class, Date.class, new TypeConverter.Converter<String, Date>()
@@ -29,12 +29,9 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-import javax.annotation.concurrent.NotThreadSafe;
-
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
 import org.alfresco.solr.InformationServerCollectionProvider;
 import org.alfresco.solr.adapters.ISimpleOrderedMap;
 import org.alfresco.util.Pair;
@@ -286,10 +283,11 @@ public class TrackerStats
             map.add("StdDev", getStandardDeviation());
             if (incdludeDetail)
             {
-                for (Entry<String, IncrementalStats> copy : copies.entrySet())
-                {
-                    map.add(copy.getKey(), copy.getValue().getNamedList(includeHist, includeValues));
-                }
+                for (String key : copies.keySet())
+                {
+                    IncrementalStats value = copies.get(key);
+                    map.add(key, value.getNamedList(includeHist, includeValues));
+                }
             }
 
             return map;
@@ -384,7 +382,6 @@ public class TrackerStats
 
     }
 
-    @NotThreadSafe
     public static class IncrementalStats
     {
         Date start = new Date();
@@ -772,7 +769,7 @@ public class TrackerStats
         {
             IncrementalStats copy = new IncrementalStats(this.scale, this.buckets, this.server);
             copy.start = this.start;
-            copy.max = this.getMax();
+            copy.max = this.max;
             copy.min = this.min;
             copy.moments[0] = this.moments[0];
             copy.moments[1] = this.moments[1];