Mirror of https://github.com/Alfresco/SearchServices.git (synced 2025-09-10 14:11:25 +00:00)
Fix/acs 4061 (#1584)

- Update lucene-solr version to 6.6.5-patched.11
- Fix vulnerabilities in the admin console
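The admin-console hardening in this commit follows one pattern: every place a page builds link markup by concatenating window.location.pathname into innerHTML, the path is first passed through encodeURI(), so quote and angle-bracket characters that reach the path (browsers vary in how much of the path stays percent-encoded) are escaped before being written into markup. A minimal sketch of the pattern, with an illustrative element ID rather than one taken from the commit:

    // Without encodeURI, a path containing characters like " or < would be
    // interpolated verbatim into the anchor markup, breaking out of the href
    // attribute; encodeURI escapes them (%22, %3C) while keeping the / separators.
    var link = document.getElementById("summaryReportLink"); // illustrative ID
    link.innerHTML = "<a href=\"" + encodeURI(window.location.pathname) +
        "admin/cores?action=SUMMARY&wt=xml\" target=\"new\">View full report</a>";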
pom.xml
@@ -43,7 +43,7 @@
 <properties>
 <maven.build.sourceVersion>11</maven.build.sourceVersion>
 <solr.base.version>6.6.5</solr.base.version>
-<solr.version>${solr.base.version}-patched.9</solr.version>
+<solr.version>${solr.base.version}-patched.11</solr.version>
 <!-- The location to download the solr zip file from. -->
 <!-- <solr.zip>https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.zip</solr.zip> -->
 <!-- Solr startup scripts do not work with any Java version higher than 9 so the scripts have been patched -->
HighlightComponent.java
@@ -1,297 +0,0 @@
/*
 * #%L
 * Alfresco Search Services
 * %%
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.apache.solr.handler.component;

import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Stream;

import com.google.common.base.Objects;
import org.apache.lucene.search.Query;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.highlight.DefaultSolrHighlighter;
import org.apache.solr.highlight.PostingsSolrHighlighter;
import org.apache.solr.highlight.SolrHighlighter;
import org.apache.solr.highlight.UnifiedSolrHighlighter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.util.plugin.SolrCoreAware;

import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toMap;

/**
 * TODO!
 *
 *
 * @since solr 1.3
 */
public class HighlightComponent extends SearchComponent implements PluginInfoInitialized, SolrCoreAware
{
    public enum HighlightMethod {
        UNIFIED("unified"),
        FAST_VECTOR("fastVector"),
        POSTINGS("postings"),
        ORIGINAL("original");

        private static final Map<String, HighlightMethod> METHODS = Collections.unmodifiableMap(Stream.of(values())
            .collect(toMap(HighlightMethod::getMethodName, Function.identity())));

        private final String methodName;

        HighlightMethod(String method) {
            this.methodName = method;
        }

        public String getMethodName() {
            return methodName;
        }

        public static HighlightMethod parse(String method) {
            return METHODS.get(method);
        }
    }

    public static final String COMPONENT_NAME = "highlight";

    private PluginInfo info = PluginInfo.EMPTY_INFO;

    @Deprecated // DWS: in 7.0 lets restructure the abstractions/relationships
    private SolrHighlighter solrConfigHighlighter;

    /**
     * @deprecated instead depend on {@link #process(ResponseBuilder)} to choose the highlighter based on
     * {@link HighlightParams#METHOD}
     */
    @Deprecated
    public static SolrHighlighter getHighlighter(SolrCore core) {
        HighlightComponent hl = (HighlightComponent) core.getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
        return hl==null ? null: hl.getHighlighter();
    }

    @Deprecated
    public SolrHighlighter getHighlighter() {
        return solrConfigHighlighter;
    }

    @Override
    public void init(PluginInfo info) {
        this.info = info;
    }

    @Override
    public void prepare(ResponseBuilder rb) throws IOException {
        SolrParams params = rb.req.getParams();
        rb.doHighlights = solrConfigHighlighter.isHighlightingEnabled(params);
        if(rb.doHighlights){
            rb.setNeedDocList(true);
            String hlq = params.get(HighlightParams.Q);
            String hlparser =
                ofNullable(params.get(HighlightParams.QPARSER))
                    .orElseGet( () -> params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE));
            if(hlq != null){
                try {
                    QParser parser = QParser.getParser(hlq, hlparser, rb.req);
                    rb.setHighlightQuery(parser.getHighlightQuery());
                } catch (SyntaxError e) {
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
                }
            }
        }
    }

    @Override
    public void inform(SolrCore core) {
        List<PluginInfo> children = info.getChildren("highlighting");
        if(children.isEmpty()) {
            PluginInfo pluginInfo = core.getSolrConfig().getPluginInfo(SolrHighlighter.class.getName()); //TODO deprecated configuration remove later
            if (pluginInfo != null) {
                solrConfigHighlighter = core.createInitInstance(pluginInfo, SolrHighlighter.class, null, DefaultSolrHighlighter.class.getName());
            } else {
                DefaultSolrHighlighter defHighlighter = new DefaultSolrHighlighter(core);
                defHighlighter.init(PluginInfo.EMPTY_INFO);
                solrConfigHighlighter = defHighlighter;
            }
        } else {
            solrConfigHighlighter = core.createInitInstance(children.get(0),SolrHighlighter.class,null, DefaultSolrHighlighter.class.getName());
        }

    }

    @Override
    public void process(ResponseBuilder rb) throws IOException {

        if (rb.doHighlights) {
            SolrQueryRequest req = rb.req;
            SolrParams params = req.getParams();

            SolrHighlighter highlighter = getHighlighter(params);

            String[] defaultHighlightFields; //TODO: get from builder by default?
            if (rb.getQparser() != null) {
                defaultHighlightFields = rb.getQparser().getDefaultHighlightFields();
            } else {
                defaultHighlightFields = params.getParams(CommonParams.DF);
            }

            Query highlightQuery = rb.getHighlightQuery();
            if(highlightQuery==null) {
                if (rb.getQparser() != null) {
                    try {
                        highlightQuery = rb.getQparser().getHighlightQuery();
                        rb.setHighlightQuery( highlightQuery );
                    } catch (Exception e) {
                        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
                    }
                } else {
                    highlightQuery = rb.getQuery();
                    rb.setHighlightQuery( highlightQuery );
                }
            }

            // No highlighting if there is no query -- consider q.alt=*:*
            if( highlightQuery != null ) {
                NamedList sumData = highlighter.doHighlighting(
                    rb.getResults().docList,
                    highlightQuery,
                    req, defaultHighlightFields );

                if(sumData != null) {
                    // TODO ???? add this directly to the response?
                    rb.rsp.add("highlighting", sumData);
                }
            }
        }
    }

    protected SolrHighlighter getHighlighter(SolrParams params) {
        HighlightMethod method = HighlightMethod.parse(params.get(HighlightParams.METHOD));
        if (method == null) {
            return solrConfigHighlighter;
        }

        switch (method) {
            case UNIFIED:
                if (solrConfigHighlighter instanceof UnifiedSolrHighlighter) {
                    return solrConfigHighlighter;
                }
                return new UnifiedSolrHighlighter(); // TODO cache one?
            case POSTINGS:
                if (solrConfigHighlighter instanceof PostingsSolrHighlighter) {
                    return solrConfigHighlighter;
                }
                return new PostingsSolrHighlighter(); // TODO cache one?
            case FAST_VECTOR: // fall-through
            case ORIGINAL:
                if (solrConfigHighlighter instanceof DefaultSolrHighlighter) {
                    return solrConfigHighlighter;
                } else {
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "In order to use " + HighlightParams.METHOD + "=" + method.getMethodName() + " the configured" +
                        " highlighter in solrconfig must be " + DefaultSolrHighlighter.class);
                }
            default: throw new AssertionError();
        }
    }

    @Override
    public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
        if (!rb.doHighlights) return;

        // Turn on highlighting only when retrieving fields
        if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
            sreq.purpose |= ShardRequest.PURPOSE_GET_HIGHLIGHTS;
            // should already be true...
            sreq.params.set(HighlightParams.HIGHLIGHT, "true");
        } else {
            sreq.params.set(HighlightParams.HIGHLIGHT, "false");
        }
    }

    @Override
    public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
    }

    @Override
    public void finishStage(ResponseBuilder rb) {
        if (rb.doHighlights && rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {

            NamedList.NamedListEntry[] arr = new NamedList.NamedListEntry[rb.resultIds.size()];

            // TODO: make a generic routine to do automatic merging of id keyed data
            for (ShardRequest sreq : rb.finished) {
                if ((sreq.purpose & ShardRequest.PURPOSE_GET_HIGHLIGHTS) == 0) continue;
                for (ShardResponse srsp : sreq.responses) {
                    if (srsp.getException() != null) {
                        // can't expect the highlight content if there was an exception for this request
                        // this should only happen when using shards.tolerant=true
                        continue;
                    }
                    NamedList hl = (NamedList)srsp.getSolrResponse().getResponse().get("highlighting");
                    SolrPluginUtils.copyNamedListIntoArrayByDocPosInResponse(hl, rb.resultIds, arr);
                }
            }

            // remove nulls in case not all docs were able to be retrieved
            rb.rsp.add("highlighting", SolrPluginUtils.removeNulls(arr, new SimpleOrderedMap<>()));
        }
    }

    ////////////////////////////////////////////
    /// SolrInfoMBean
    ////////////////////////////////////////////

    @Override
    public String getDescription() {
        return "Highlighting";
    }

    @Override
    public Category getCategory() {
        return Category.HIGHLIGHTER;
    }

    @Override
    public URL[] getDocs() {
        return null;
    }
}
StatsValuesFactory.java
@@ -1,935 +0,0 @@
/*
 * #%L
 * Alfresco Search Services
 * %%
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.apache.solr.handler.component;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.*;
import java.nio.ByteBuffer;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.EnumFieldValue;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.handler.component.StatsField.Stat;
import org.apache.solr.schema.*;

import com.tdunning.math.stats.AVLTreeDigest;
import com.google.common.hash.HashFunction;

import org.apache.solr.util.hll.HLL;
import org.apache.solr.util.hll.HLLType;

/**
 * Factory class for creating instances of
 * {@link org.apache.solr.handler.component.StatsValues}
 */
public class StatsValuesFactory {

    /**
     * Creates an instance of StatsValues which supports values from the specified
     * {@link StatsField}
     *
     * @param statsField
     *            {@link StatsField} whose statistics will be created by the
     *            resulting {@link StatsValues}
     * @return Instance of {@link StatsValues} that will create statistics from
     *         values from the specified {@link StatsField}
     */
    public static StatsValues createStatsValues(StatsField statsField) {

        final SchemaField sf = statsField.getSchemaField();

        if (null == sf) {
            // function stats
            return new NumericStatsValues(statsField);
        }

        final FieldType fieldType = sf.getType(); // TODO: allow FieldType to provide impl.

        if (TrieDateField.class.isInstance(fieldType) || DatePointField.class.isInstance(fieldType)) {
            DateStatsValues statsValues = new DateStatsValues(statsField);
            if (sf.multiValued()) {
                return new SortedDateStatsValues(statsValues, statsField);
            }
            return statsValues;
        } else if (TrieField.class.isInstance(fieldType) || PointField.class.isInstance(fieldType)) {

            NumericStatsValues statsValue = new NumericStatsValues(statsField);
            if (sf.multiValued()) {
                return new SortedNumericStatsValues(statsValue, statsField);
            }
            return statsValue;
        } else if (StrField.class.isInstance(fieldType)) {
            return new StringStatsValues(statsField);
        } else if (sf.getType().getClass().equals(EnumField.class)) {
            return new EnumStatsValues(statsField);
        } else {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "Field type " + fieldType + " is not currently supported");
        }
    }
}

/**
 * Abstract implementation of
 * {@link org.apache.solr.handler.component.StatsValues} that provides the
 * default behavior for most StatsValues implementations.
 *
 * There are very few requirements placed on what statistics concrete
 * implementations should collect, with the only required statistics being the
 * minimum and maximum values.
 */
abstract class AbstractStatsValues<T> implements StatsValues {
    private static final String FACETS = "facets";

    /** Tracks all data about the stats we need to collect */
    final protected StatsField statsField;

    /** may be null if we are collecting stats directly from a function ValueSource */
    final protected SchemaField sf;
    /**
     * may be null if we are collecting stats directly from a function ValueSource
     */
    final protected FieldType ft;

    // final booleans from StatsField to allow better inlining & JIT optimizing
    final protected boolean computeCount;
    final protected boolean computeMissing;
    final protected boolean computeCalcDistinct; // needed for either countDistinct or distinctValues
    final protected boolean computeMin;
    final protected boolean computeMax;
    final protected boolean computeMinOrMax;
    final protected boolean computeCardinality;

    /**
     * Either a function value source to collect from, or the ValueSource associated
     * with a single valued field we are collecting from. Will be null until/unless
     * {@link #setNextReader} is called at least once
     */
    private ValueSource valueSource;
    /**
     * Context to use when retrieving FunctionValues, will be null until/unless
     * {@link #setNextReader} is called at least once
     */
    private Map vsContext;
    /**
     * Values to collect, will be null until/unless {@link #setNextReader} is
     * called at least once
     */
    protected FunctionValues values;

    protected T max;
    protected T min;
    protected long missing;
    protected long count;
    protected long countDistinct;
    protected final Set<T> distinctValues;

    /**
     * Hash function that must be used by implementations of {@link #hash}
     */
    protected final HashFunction hasher;
    // if null, no HLL logic can be computed; not final because of "union" optimization (see below)
    private HLL hll;

    // facetField -> facetValue
    protected Map<String,Map<String, StatsValues>> facets = new HashMap<>();

    protected AbstractStatsValues(StatsField statsField) {
        this.statsField = statsField;
        this.computeCount = statsField.calculateStats(Stat.count);
        this.computeMissing = statsField.calculateStats(Stat.missing);
        this.computeCalcDistinct = statsField.calculateStats(Stat.countDistinct)
            || statsField.calculateStats(Stat.distinctValues);
        this.computeMin = statsField.calculateStats(Stat.min);
        this.computeMax = statsField.calculateStats(Stat.max);
        this.computeMinOrMax = computeMin || computeMax;

        this.distinctValues = computeCalcDistinct ? new TreeSet<>() : null;

        this.computeCardinality = statsField.calculateStats(Stat.cardinality);
        if ( computeCardinality ) {

            hasher = statsField.getHllOptions().getHasher();
            hll = statsField.getHllOptions().newHLL();
            assert null != hll : "Cardinality requires an HLL";
        } else {
            hll = null;
            hasher = null;
        }

        // alternatively, we could refactor a common base class that doesn't know/care
        // about either SchemaField or ValueSource - but then there would be a lot of
        // duplicate code between "NumericSchemaFieldStatsValues" and
        // "NumericValueSourceStatsValues" which would have diff parent classes
        //
        // part of the complexity here being that the StatsValues API serves two
        // masters: collecting concrete Values from things like DocValuesStats and
        // the distributed aggregation logic, but also collecting docIds which it
        // then uses to go out and pull concrete values from the ValueSource
        // (from a func, or single valued field)
        if (null != statsField.getSchemaField()) {
            assert null == statsField.getValueSource();
            this.sf = statsField.getSchemaField();
            this.ft = sf.getType();
        } else {
            assert null != statsField.getValueSource();
            assert null == statsField.getSchemaField();
            this.sf = null;
            this.ft = null;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void accumulate(NamedList stv) {
        if (computeCount) {
            count += (Long) stv.get("count");
        }
        if (computeMissing) {
            missing += (Long) stv.get("missing");
        }
        if (computeCalcDistinct) {
            distinctValues.addAll((Collection<T>) stv.get("distinctValues"));
            countDistinct = distinctValues.size();
        }

        if (computeMinOrMax) {
            updateMinMax((T) stv.get("min"), (T) stv.get("max"));
        }

        if (computeCardinality) {
            byte[] data = (byte[]) stv.get("cardinality");
            HLL other = HLL.fromBytes(data);
            if (hll.getType().equals(HLLType.EMPTY)) {
                // The HLL.union method goes out of its way not to modify the "other" HLL.
                // Which means in the case of merging into an "EMPTY" HLL (guaranteed to happen at
                // least once in every coordination of shard requests) it always clones all
                // of the internal storage -- but since we're going to throw "other" away after
                // the merge, this just means a short term doubling of RAM that we can skip.
                hll = other;
            } else {
                hll.union(other);
            }
        }

        updateTypeSpecificStats(stv);

        NamedList f = (NamedList) stv.get(FACETS);
        if (f == null) {
            return;
        }

        for (int i = 0; i < f.size(); i++) {
            String field = f.getName(i);
            NamedList vals = (NamedList) f.getVal(i);
            Map<String, StatsValues> addTo = facets.get(field);
            if (addTo == null) {
                addTo = new HashMap<>();
                facets.put(field, addTo);
            }
            for (int j = 0; j < vals.size(); j++) {
                String val = vals.getName(j);
                StatsValues vvals = addTo.get(val);
                if (vvals == null) {
                    vvals = StatsValuesFactory.createStatsValues(statsField);
                    addTo.put(val, vvals);
                }
                vvals.accumulate((NamedList) vals.getVal(j));
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void accumulate(BytesRef value, int count) {
        if (null == ft) {
            throw new IllegalStateException(
"Can't collect & convert BytesRefs on stats that do't use a a FieldType: "
|
||||
                    + statsField);
        }
        T typedValue = (T) ft.toObject(sf, value);
        accumulate(typedValue, count);
    }

    public void accumulate(T value, int count) {
        assert null != value : "Can't accumulate null";

        if (computeCount) {
            this.count += count;
        }
        if (computeCalcDistinct) {
            distinctValues.add(value);
            countDistinct = distinctValues.size();
        }
        if (computeMinOrMax) {
            updateMinMax(value, value);
        }
        if (computeCardinality) {
            if (null == hasher) {
                assert value instanceof Number : "pre-hashed value support only works with numeric longs";
                hll.addRaw(((Number)value).longValue());
            } else {
                hll.addRaw(hash(value));
            }
        }
        updateTypeSpecificStats(value, count);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void missing() {
        if (computeMissing) {
            missing++;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void addMissing(int count) {
        missing += count;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void addFacet(String facetName, Map<String, StatsValues> facetValues) {
        facets.put(facetName, facetValues);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public NamedList<?> getStatsValues() {
        NamedList<Object> res = new SimpleOrderedMap<>();

        if (statsField.includeInResponse(Stat.min)) {
            res.add("min", min);
        }
        if (statsField.includeInResponse(Stat.max)) {
            res.add("max", max);
        }
        if (statsField.includeInResponse(Stat.count)) {
            res.add("count", count);
        }
        if (statsField.includeInResponse(Stat.missing)) {
            res.add("missing", missing);
        }
        if (statsField.includeInResponse(Stat.distinctValues)) {
            res.add("distinctValues", distinctValues);
        }
        if (statsField.includeInResponse(Stat.countDistinct)) {
            res.add("countDistinct", countDistinct);
        }
        if (statsField.includeInResponse(Stat.cardinality)) {
            if (statsField.getIsShard()) {
                res.add("cardinality", hll.toBytes());
            } else {
                res.add("cardinality", hll.cardinality());
            }
        }

        addTypeSpecificStats(res);

        if (!facets.isEmpty()) {

            // add the facet stats
            NamedList<NamedList<?>> nl = new SimpleOrderedMap<>();
            for (Map.Entry<String,Map<String,StatsValues>> entry : facets.entrySet()) {
                NamedList<NamedList<?>> nl2 = new SimpleOrderedMap<>();
                nl.add(entry.getKey(), nl2);
                for (Map.Entry<String,StatsValues> e2 : entry.getValue().entrySet()) {
                    nl2.add(e2.getKey(), e2.getValue().getStatsValues());
                }
            }

            res.add(FACETS, nl);
        }

        return res;
    }

    /**
     * {@inheritDoc}
     */
    public void setNextReader(LeafReaderContext ctx) throws IOException {
        if (valueSource == null) {
            // first time we've collected local values, get the right ValueSource
            valueSource = (null == ft)
                ? statsField.getValueSource()
                : ft.getValueSource(sf, null);
            vsContext = ValueSource.newContext(statsField.getSearcher());
        }
        values = valueSource.getValues(vsContext, ctx);
    }

    /**
     * Hash function to be used for computing cardinality.
     *
     * This method will not be called in cases where the user has indicated the values
     * are already hashed. If this method is called, then {@link #hasher} will be non-null,
     * and should be used to generate the appropriate hash value.
     *
     * @see Stat#cardinality
     * @see #hasher
     */
    protected abstract long hash(T value);

    /**
     * Updates the minimum and maximum statistics based on the given values
     *
     * @param min
     *            Value that the current minimum should be updated against
     * @param max
     *            Value that the current maximum should be updated against
     */
    protected abstract void updateMinMax(T min, T max);

    /**
     * Updates the type specific statistics based on the given value
     *
     * @param value
     *            Value the statistics should be updated against
     * @param count
     *            Number of times the value is being accumulated
     */
    protected abstract void updateTypeSpecificStats(T value, int count);

    /**
     * Updates the type specific statistics based on the values in the given list
     *
     * @param stv
     *            List containing values the current statistics should be updated
     *            against
     */
    protected abstract void updateTypeSpecificStats(NamedList stv);

    /**
     * Add any type specific statistics to the given NamedList
     *
     * @param res
     *            NamedList to add the type specific statistics to
     */
    protected abstract void addTypeSpecificStats(NamedList<Object> res);
}

/**
 * Implementation of StatsValues that supports Double values
 */
class NumericStatsValues extends AbstractStatsValues<Number> {

    double sum;
    double sumOfSquares;

    AVLTreeDigest tdigest;

    double minD; // perf optimization, only valid if (null != this.min)
    double maxD; // perf optimization, only valid if (null != this.max)

    final protected boolean computeSum;
    final protected boolean computeSumOfSquares;
    final protected boolean computePercentiles;

    public NumericStatsValues(StatsField statsField) {
        super(statsField);

        this.computeSum = statsField.calculateStats(Stat.sum);
        this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares);

        this.computePercentiles = statsField.calculateStats(Stat.percentiles);
        if ( computePercentiles ) {
            tdigest = new AVLTreeDigest(statsField.getTdigestCompression());
        }

    }

    @Override
    public long hash(Number v) {
        // have to use a bit of reflection to ensure good hash values since
        // we don't have truly type specific stats
        if (v instanceof Long) {
            return hasher.hashLong(v.longValue()).asLong();
        } else if (v instanceof Integer) {
            return hasher.hashInt(v.intValue()).asLong();
        } else if (v instanceof Double) {
            return hasher.hashLong(Double.doubleToRawLongBits(v.doubleValue())).asLong();
        } else if (v instanceof Float) {
            return hasher.hashInt(Float.floatToRawIntBits(v.floatValue())).asLong();
        } else if (v instanceof Byte) {
            return hasher.newHasher().putByte(v.byteValue()).hash().asLong();
        } else if (v instanceof Short) {
            return hasher.newHasher().putShort(v.shortValue()).hash().asLong();
        }
        // else...
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
            "Unsupported Numeric Type ("+v.getClass()+") for hashing: " +statsField);
    }

    @Override
    public void accumulate(int docID) {
        if (values.exists(docID)) {
            Number value = (Number) values.objectVal(docID);
            accumulate(value, 1);
        } else {
            missing();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void updateTypeSpecificStats(NamedList stv) {
        if (computeSum) {
            sum += ((Number) stv.get("sum")).doubleValue();
        }
        if (computeSumOfSquares) {
            sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue();
        }

        if (computePercentiles) {
            byte[] data = (byte[]) stv.get("percentiles");
            ByteBuffer buf = ByteBuffer.wrap(data);
            tdigest.add(AVLTreeDigest.fromBytes(buf));
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void updateTypeSpecificStats(Number v, int count) {
        double value = v.doubleValue();
        if (computeSumOfSquares) {
            sumOfSquares += (value * value * count); // for std deviation
        }
        if (computeSum) {
            sum += value * count;
        }
        if (computePercentiles) {
            tdigest.add(value, count);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateMinMax(Number min, Number max) {
        // we always use the double values, because that way the response Object class is
        // consistent regardless of whether we only have 1 value or many that we min/max
        //
        // TODO: would be nice to have subclasses for each type of Number ... breaks backcompat

        if (computeMin) { // nested if to encourage JIT to optimize aware final var?
            if (null != min) {
                double minD = min.doubleValue();
                if (null == this.min || minD < this.minD) {
                    // Double for result & cached primitive double to minimize unboxing in future comparisons
                    this.min = this.minD = minD;
                }
            }
        }
        if (computeMax) { // nested if to encourage JIT to optimize aware final var?
            if (null != max) {
                double maxD = max.doubleValue();
                if (null == this.max || this.maxD < maxD) {
                    // Double for result & cached primitive double to minimize unboxing in future comparisons
                    this.max = this.maxD = maxD;
                }
            }
        }
    }

    /**
     * Adds sum, sumOfSquares, mean, stddev, and percentiles to the given
     * NamedList
     *
     * @param res
     *            NamedList to add the type specific statistics to
     */
    @Override
    protected void addTypeSpecificStats(NamedList<Object> res) {
        if (statsField.includeInResponse(Stat.sum)) {
            res.add("sum", sum);
        }
        if (statsField.includeInResponse(Stat.sumOfSquares)) {
            res.add("sumOfSquares", sumOfSquares);
        }
        if (statsField.includeInResponse(Stat.mean)) {
            res.add("mean", sum / count);
        }
        if (statsField.includeInResponse(Stat.stddev)) {
            res.add("stddev", getStandardDeviation());
        }
        if (statsField.includeInResponse(Stat.percentiles)) {
            if (statsField.getIsShard()) {
                // as of current t-digest version, smallByteSize() internally does a full conversion in
                // order to determine what the size is (can't be precomputed?) .. so rather than
                // serialize to a ByteBuffer twice, allocate the max possible size buffer,
                // serialize once, and then copy only the byte[] subset that we need, and free up the buffer
                ByteBuffer buf = ByteBuffer.allocate(tdigest.byteSize()); // upper bound
                tdigest.asSmallBytes(buf);
                res.add("percentiles", Arrays.copyOf(buf.array(), buf.position()) );
            } else {
                NamedList<Object> percentileNameList = new NamedList<Object>();
                for (Double percentile : statsField.getPercentilesList()) {
                    // Empty document set case
                    if (tdigest.size() == 0) {
                        percentileNameList.add(percentile.toString(), null);
                    } else {
                        Double cutoff = tdigest.quantile(percentile / 100);
                        percentileNameList.add(percentile.toString(), cutoff);
                    }
                }
                res.add("percentiles", percentileNameList);
            }
        }
    }

    /**
     * Calculates the standard deviation statistic
     *
     * @return Standard deviation statistic
     */
    private double getStandardDeviation() {
        if (count <= 1.0D) {
            return 0.0D;
        }

        return Math.sqrt(((count * sumOfSquares) - (sum * sum)) / (count * (count - 1.0D)));

    }
}

/**
 * Implementation of StatsValues that supports EnumField values
 */
class EnumStatsValues extends AbstractStatsValues<EnumFieldValue> {

    public EnumStatsValues(StatsField statsField) {
        super(statsField);
    }

    @Override
    public long hash(EnumFieldValue v) {
        return hasher.hashInt(v.toInt().intValue()).asLong();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void accumulate(int docID) {
        if (values.exists(docID)) {
            Integer intValue = (Integer) values.objectVal(docID);
            String stringValue = values.strVal(docID);
            EnumFieldValue enumFieldValue = new EnumFieldValue(intValue, stringValue);
            accumulate(enumFieldValue, 1);
        } else {
            missing();
        }
    }

    /**
     * {@inheritDoc}
     */
    protected void updateMinMax(EnumFieldValue min, EnumFieldValue max) {
        if (computeMin) { // nested if to encourage JIT to optimize aware final var?
            if (null != min) {
                if (null == this.min || (min.compareTo(this.min) < 0)) {
                    this.min = min;
                }
            }
        }
        if (computeMax) { // nested if to encourage JIT to optimize aware final var?
            if (null != max) {
                if (null == this.max || (max.compareTo(this.max) > 0)) {
                    this.max = max;
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateTypeSpecificStats(NamedList stv) {
        // No type specific stats
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateTypeSpecificStats(EnumFieldValue value, int count) {
        // No type specific stats
    }

    /**
     * Adds no type specific statistics
     */
    @Override
    protected void addTypeSpecificStats(NamedList<Object> res) {
        // Add no statistics
    }

}

/**
 * Implementation of StatsValues that supports Date values
 */
class DateStatsValues extends AbstractStatsValues<Date> {

    private double sum = 0.0;
    double sumOfSquares = 0;

    final protected boolean computeSum;
    final protected boolean computeSumOfSquares;

    public DateStatsValues(StatsField statsField) {
        super(statsField);
        this.computeSum = statsField.calculateStats(Stat.sum);
        this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares);
    }

    @Override
    public long hash(Date v) {
        return hasher.hashLong(v.getTime()).asLong();
    }

    @Override
    public void accumulate(int docID) {
        if (values.exists(docID)) {
            accumulate((Date) values.objectVal(docID), 1);
        } else {
            missing();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateTypeSpecificStats(NamedList stv) {
        if (computeSum) {
            sum += ((Number) stv.get("sum")).doubleValue();
        }
        if (computeSumOfSquares) {
            sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void updateTypeSpecificStats(Date v, int count) {
        long value = v.getTime();
        if (computeSumOfSquares) {
            sumOfSquares += ((double)value * value * count); // for std deviation
        }
        if (computeSum) {
            sum += value * count;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateMinMax(Date min, Date max) {
        if (computeMin) { // nested if to encourage JIT to optimize aware final var?
            if (null != min && (this.min==null || this.min.after(min))) {
                this.min = min;
            }
        }
        if (computeMax) { // nested if to encourage JIT to optimize aware final var?
            if (null != max && (this.max==null || this.max.before(max))) {
                this.max = max;
            }
        }
    }

    /**
     * Adds sum and mean statistics to the given NamedList
     *
     * @param res
     *            NamedList to add the type specific statistics to
     */
    @Override
    protected void addTypeSpecificStats(NamedList<Object> res) {
        if (statsField.includeInResponse(Stat.sum)) {
            res.add("sum", sum);
        }
        if (statsField.includeInResponse(Stat.mean)) {
            res.add("mean", (count > 0) ? new Date((long)(sum / count)) : null);
        }
        if (statsField.includeInResponse(Stat.sumOfSquares)) {
            res.add("sumOfSquares", sumOfSquares);
        }
        if (statsField.includeInResponse(Stat.stddev)) {
            res.add("stddev", getStandardDeviation());
        }
    }

    /**
     * Calculates the standard deviation. For dates, this is really the MS
     * deviation
     *
     * @return Standard deviation statistic
     */
    private double getStandardDeviation() {
        if (count <= 1) {
            return 0.0D;
        }
        return Math.sqrt(((count * sumOfSquares) - (sum * sum))
            / (count * (count - 1.0D)));
    }
}

/**
 * Implementation of StatsValues that supports String values
 */
class StringStatsValues extends AbstractStatsValues<String> {

    public StringStatsValues(StatsField statsField) {
        super(statsField);
    }

    @Override
    public long hash(String v) {
        return hasher.hashString(v, Charset.defaultCharset()).asLong();
    }

    @Override
    public void accumulate(int docID) {
        if (values.exists(docID)) {
            String value = values.strVal(docID);
            if (value != null) {
                accumulate(value, 1);
            } else {
                missing();
            }
        } else {
            missing();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateTypeSpecificStats(NamedList stv) {
        // No type specific stats
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateTypeSpecificStats(String value, int count) {
        // No type specific stats
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void updateMinMax(String min, String max) {
        if (computeMin) { // nested if to encourage JIT to optimize aware final var?
            this.min = min(this.min, min);
        }
        if (computeMax) { // nested if to encourage JIT to optimize aware final var?
            this.max = max(this.max, max);
        }
    }

    /**
     * Adds no type specific statistics
     */
    @Override
    protected void addTypeSpecificStats(NamedList<Object> res) {
        // Add no statistics
    }

    /**
     * Determines which of the given Strings is the maximum, as computed by
     * {@link String#compareTo(String)}
     *
     * @param str1
     *            String to compare against str2
     * @param str2
     *            String to compare against str1
     * @return str1 if it is considered greater by
     *         {@link String#compareTo(String)}, str2 otherwise
     */
    private static String max(String str1, String str2) {
        if (str1 == null) {
            return str2;
        } else if (str2 == null) {
            return str1;
        }
        return (str1.compareTo(str2) > 0) ? str1 : str2;
    }

    /**
     * Determines which of the given Strings is the minimum, as computed by
     * {@link String#compareTo(String)}
     *
     * @param str1
     *            String to compare against str2
     * @param str2
     *            String to compare against str1
     * @return str1 if it is considered less by {@link String#compareTo(String)},
     *         str2 otherwise
     */
    private static String min(String str1, String str2) {
        if (str1 == null) {
            return str2;
        } else if (str2 == null) {
            return str1;
        }
        return (str1.compareTo(str2) < 0) ? str1 : str2;
    }
}
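Both getStandardDeviation implementations in the deleted file above derive the sample standard deviation purely from the running aggregates count, sum, and sumOfSquares, using the identity

$$ s = \sqrt{\frac{n \sum_{i} x_i^2 - \bigl(\sum_{i} x_i\bigr)^2}{n\,(n-1)}} $$

which is exactly `Math.sqrt(((count * sumOfSquares) - (sum * sum)) / (count * (count - 1.0D)))`. Because only these three scalars are needed, each shard can ship its partial sums and the coordinating node merges them by simple addition in updateTypeSpecificStats(NamedList) before computing stddev once at the end.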
@@ -28,7 +28,7 @@ function getSummary() {
 {
 resultStr = resultStr + "<dt style=\"width: 45%;\">" + index + ": </dt><dd style=\"width: 45%;\">" + responseObj.Summary.alfresco[summaryList[index]] + "</dd>"
 }
-document.getElementById("summaryReportLink").innerHTML = "<a href=\"" + window.location.pathname + "admin/cores?action=SUMMARY&wt=xml\" target=\"new\">View full report (opens in a new window)</a>";
+document.getElementById("summaryReportLink").innerHTML = "<a href=\"" + encodeURI(window.location.pathname) + "admin/cores?action=SUMMARY&wt=xml\" target=\"new\">View full report (opens in a new window)</a>";
 document.getElementById("reportSummaryResults").innerHTML = resultStr;
 }
 }
@@ -71,7 +71,7 @@ function getftsStatus() {
 var newVal = (ftsValues["New"]) ? (ftsValues["New"]) : 0;
 resultStr = resultStr + "<dt>FTS Status New: </dt><dd>" + newVal + "</dd>";

-document.getElementById("ftsStatusReportLink").innerHTML = "<a href=\"" + window.location.pathname + "admin/cores?action=REPORT&wt=xml\" target=\"_blank\">View full report (opens in a new window)</a><br/>Note: The FTS status report can take some time to generate";
+document.getElementById("ftsStatusReportLink").innerHTML = "<a href=\"" + encodeURI(window.location.pathname) + "admin/cores?action=REPORT&wt=xml\" target=\"_blank\">View full report (opens in a new window)</a><br/>Note: The FTS status report can take some time to generate";

 document.getElementById("reportFTSStatusResults").innerHTML = resultStr;
 }
@@ -141,9 +141,9 @@ getftsStatus();
 <div>

 <script type="text/javascript">
-document.getElementById("errorLink").innerHTML = "<a href=\"" + window.location.pathname + "alfresco/query?q=ERROR*&wt=xml\" target=\"_blank\">Solr Errors</a>";
-document.getElementById("exceptionMessageLink").innerHTML = "<a href=\"" + window.location.pathname + "alfresco/query?q=EXCEPTIONMESSAGE:*&wt=xml\" target=\"_blank\">Solr Exception Messages</a>";
-document.getElementById("exceptionStackLink").innerHTML = "<a href=\"" + window.location.pathname + "alfresco/query?q=EXCEPTIONSTACK:*\" target=\"_blank\">Solr Exceptions Stack</a>";
+document.getElementById("errorLink").innerHTML = "<a href=\"" + encodeURI(window.location.pathname) + "alfresco/query?q=ERROR*&wt=xml\" target=\"_blank\">Solr Errors</a>";
+document.getElementById("exceptionMessageLink").innerHTML = "<a href=\"" + encodeURI(window.location.pathname) + "alfresco/query?q=EXCEPTIONMESSAGE:*&wt=xml\" target=\"_blank\">Solr Exception Messages</a>";
+document.getElementById("exceptionStackLink").innerHTML = "<a href=\"" + encodeURI(window.location.pathname) + "alfresco/query?q=EXCEPTIONSTACK:*\" target=\"_blank\">Solr Exceptions Stack</a>";
 </script>

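Note that all of the added lines use encodeURI rather than encodeURIComponent, which fits a value that is a whole path: encodeURI escapes the characters that could terminate the href attribute while leaving path separators intact, whereas encodeURIComponent would escape the slashes too. A quick comparison with illustrative values:

    encodeURI('/solr admin/"x/');          // "/solr%20admin/%22x/"  (space and quote escaped, slashes kept)
    encodeURIComponent('/solr admin/');    // "%2Fsolr%20admin%2F"   (slashes escaped as well)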
@@ -185,28 +185,28 @@ jsonic-1.2.7.jar
 jul-to-slf4j-1.7.7.jar
 juniversalchardet-1.0.3.jar
 langdetect-1.1-20120112.jar
-lucene-analyzers-common-6.6.5-patched.9.jar
-lucene-analyzers-icu-6.6.5-patched.9.jar
-lucene-analyzers-kuromoji-6.6.5-patched.9.jar
-lucene-analyzers-morfologik-6.6.5-patched.9.jar
-lucene-analyzers-phonetic-6.6.5-patched.9.jar
-lucene-analyzers-smartcn-6.6.5-patched.9.jar
-lucene-analyzers-stempel-6.6.5-patched.9.jar
-lucene-backward-codecs-6.6.5-patched.9.jar
-lucene-classification-6.6.5-patched.9.jar
-lucene-codecs-6.6.5-patched.9.jar
-lucene-core-6.6.5-patched.9.jar
-lucene-expressions-6.6.5-patched.9.jar
-lucene-grouping-6.6.5-patched.9.jar
-lucene-highlighter-6.6.5-patched.9.jar
-lucene-join-6.6.5-patched.9.jar
-lucene-memory-6.6.5-patched.9.jar
-lucene-misc-6.6.5-patched.9.jar
-lucene-queries-6.6.5-patched.9.jar
-lucene-queryparser-6.6.5-patched.9.jar
-lucene-sandbox-6.6.5-patched.9.jar
-lucene-spatial-extras-6.6.5-patched.9.jar
-lucene-suggest-6.6.5-patched.9.jar
+lucene-analyzers-common-6.6.5-patched.11.jar
+lucene-analyzers-icu-6.6.5-patched.11.jar
+lucene-analyzers-kuromoji-6.6.5-patched.11.jar
+lucene-analyzers-morfologik-6.6.5-patched.11.jar
+lucene-analyzers-phonetic-6.6.5-patched.11.jar
+lucene-analyzers-smartcn-6.6.5-patched.11.jar
+lucene-analyzers-stempel-6.6.5-patched.11.jar
+lucene-backward-codecs-6.6.5-patched.11.jar
+lucene-classification-6.6.5-patched.11.jar
+lucene-codecs-6.6.5-patched.11.jar
+lucene-core-6.6.5-patched.11.jar
+lucene-expressions-6.6.5-patched.11.jar
+lucene-grouping-6.6.5-patched.11.jar
+lucene-highlighter-6.6.5-patched.11.jar
+lucene-join-6.6.5-patched.11.jar
+lucene-memory-6.6.5-patched.11.jar
+lucene-misc-6.6.5-patched.11.jar
+lucene-queries-6.6.5-patched.11.jar
+lucene-queryparser-6.6.5-patched.11.jar
+lucene-sandbox-6.6.5-patched.11.jar
+lucene-spatial-extras-6.6.5-patched.11.jar
+lucene-suggest-6.6.5-patched.11.jar
 metrics-core-3.2.2.jar
 metrics-ganglia-3.2.2.jar
 metrics-graphite-3.2.2.jar
@@ -221,11 +221,11 @@ protobuf-java-3.19.6.jar
 rome-1.5.1.jar
 slf4j-api-1.7.7.jar
 slf4j-log4j12-1.7.7.jar
-solr-analysis-extras-6.6.5-patched.9.jar
-solr-clustering-6.6.5-patched.9.jar
-solr-core-6.6.5-patched.9.jar
-solr-langid-6.6.5-patched.9.jar
-solr-solrj-6.6.5-patched.9.jar
+solr-analysis-extras-6.6.5-patched.11.jar
+solr-clustering-6.6.5-patched.11.jar
+solr-core-6.6.5-patched.11.jar
+solr-langid-6.6.5-patched.11.jar
+solr-solrj-6.6.5-patched.11.jar
 spatial4j-0.6.jar
 start.jar
 stax2-api-3.1.4.jar