19 Commits

Author SHA1 Message Date
35bae4283d get authorities from AuthorityService 2025-01-09 11:53:48 -05:00
d537c8ec49 logging authority for debugging 2025-01-09 11:05:46 -05:00
f17556835a fix afterPropertiesSet() 2025-01-08 17:10:33 -05:00
4531c7af8e changed admin to user auth; using configurable auth 2025-01-08 16:51:47 -05:00
692410f535 moved ASIE custom authorization to AbstractWebScript 2025-01-08 16:47:35 -05:00
1230a07a5a added transaction wrapper to REST declaration 2025-01-08 14:52:34 -05:00
47835d852f wrapped attributeService in tx 2025-01-08 14:33:14 -05:00
7535475581 refactored PersistedNode for serialization 2025-01-08 13:52:58 -05:00
14887ca167 renamed path variables from shardSet to shardCore 2025-01-08 11:55:14 -05:00
632900ecee Merge branch 'feature/community' into develop 2024-11-14 11:02:40 -05:00
af7c9e148e compiling/running module (untested) 2024-11-14 11:01:49 -05:00
de8e0bf2d7 update from refactoring (incomplete) 2024-11-13 18:03:22 -05:00
006597f6fb Merge branch 'develop' into feature/community 2024-11-13 18:02:53 -05:00
8be29fc37d substantial refactoring 2024-11-13 18:02:19 -05:00
3ecbf006dd added query parsing (incomplete) 2024-11-06 13:24:54 -05:00
07d6e63457 Merge branch 'develop' into feature/community 2024-11-06 13:24:23 -05:00
01d2f5ce23 fix v1.1.x pom 2024-11-01 08:35:38 -04:00
8a0db9f11d Merge branch 'develop' into feature/community 2024-11-01 08:35:03 -04:00
3e544c125b initial community-module (incomplete/breaking) 2024-10-31 14:55:42 -04:00
89 changed files with 3779 additions and 737 deletions

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.1-SNAPSHOT</version>
<version>1.2-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>

12
community-module/.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
# Maven
target
pom.xml.versionsBackup
# Eclipse
.project
.classpath
.settings
.vscode
# IDEA
/.idea/

View File

@@ -0,0 +1 @@
# ASIE Platform Module Library

Binary file not shown.

106
community-module/pom.xml Normal file
View File

@@ -0,0 +1,106 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>com.inteligr8.alfresco</groupId>
		<artifactId>asie-platform-module-parent</artifactId>
		<version>1.2-SNAPSHOT</version>
		<relativePath>../</relativePath>
	</parent>
	<artifactId>asie-community-platform-module</artifactId>
	<packaging>jar</packaging>
	<name>ASIE Platform Module for ACS Community</name>
	<properties>
		<!-- ACS Community platform/SDK versions targeted by this module;
		     presumably consumed by the BeeDK RAD tiles below - verify -->
		<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
		<alfresco.platform.version>23.3.0</alfresco.platform.version>
		<alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
		<tomcat-rad.version>10-2.1</tomcat-rad.version>
		<beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
	</properties>
	<dependencyManagement>
		<dependencies>
			<!-- Import the ACS Community BOM so Alfresco artifact versions
			     (e.g. alfresco-repository below) need not be pinned here -->
			<dependency>
				<groupId>org.alfresco</groupId>
				<artifactId>acs-community-packaging</artifactId>
				<version>${alfresco.platform.version}</version>
				<type>pom</type>
				<scope>import</scope>
			</dependency>
		</dependencies>
	</dependencyManagement>
	<dependencies>
		<dependency>
			<groupId>com.inteligr8.alfresco</groupId>
			<artifactId>cachext-platform-module</artifactId>
			<version>1.0-SNAPSHOT</version>
		</dependency>
		<dependency>
			<groupId>com.inteligr8.alfresco</groupId>
			<artifactId>asie-shared</artifactId>
			<version>${project.version}</version>
		</dependency>
		<!-- Needed by this module, but provided by ACS -->
		<dependency>
			<groupId>org.alfresco</groupId>
			<artifactId>alfresco-repository</artifactId>
			<scope>provided</scope>
		</dependency>
		<!-- Alfresco Modules required to use this module -->
		<dependency>
			<groupId>com.inteligr8.alfresco</groupId>
			<artifactId>cxf-jaxrs-platform-module</artifactId>
			<version>1.3.1-acs-v23.3</version>
			<type>amp</type>
		</dependency>
		<!-- Including for testing purposes only -->
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.mockito</groupId>
			<artifactId>mockito-core</artifactId>
			<scope>test</scope>
		</dependency>
	</dependencies>
	<build>
		<plugins>
			<!-- BeeDK tiles contribute the packaging/RAD build logic shared
			     across ASIE modules; see the documentation links below -->
			<plugin>
				<groupId>io.repaint.maven</groupId>
				<artifactId>tiles-maven-plugin</artifactId>
				<version>2.40</version>
				<extensions>true</extensions>
				<configuration>
					<tiles>
						<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-search-rad-tile -->
						<tile>com.inteligr8.ootbee:beedk-acs-search-rad-tile:[1.1.6,2.0.0)</tile>
						<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-self-rad-tile -->
						<tile>com.inteligr8.ootbee:beedk-acs-platform-self-rad-tile:[1.1.6,2.0.0)</tile>
						<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-module-tile -->
						<tile>com.inteligr8.ootbee:beedk-acs-platform-module-tile:[1.1.6,2.0.0)</tile>
					</tiles>
				</configuration>
			</plugin>
		</plugins>
	</build>
	<repositories>
		<repository>
			<id>alfresco-public</id>
			<url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
		</repository>
	</repositories>
</project>

74
community-module/rad.ps1 Normal file
View File

@@ -0,0 +1,74 @@
# Rapid application development (RAD) helper for this module (PowerShell).
# Usage: .\rad.ps1 [ start | start_log | stop | restart | rebuild | tail {container} | containers ]

function discoverArtifactId {
    # Resolve the Maven artifactId of this project into $script:ARTIFACT_ID.
    # The "." split in project"."artifactId avoids PowerShell parsing issues
    # with the dotted -D expression value.
    $script:ARTIFACT_ID=(mvn -q -Dexpression=project"."artifactId -DforceStdout help:evaluate)
}
function rebuild {
    # Rebuild only; does not (re)start any containers.
    echo "Rebuilding project ..."
    mvn process-classes
}
function start_ {
    # -Drad activates the RAD profile, which starts the Docker containers.
    # NOTE(review): this uses process-classes while rad.sh uses
    # process-test-classes - confirm which phase is intended.
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad process-classes
}
function start_log {
    # Same as start_, but stream container logs to the console.
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad "-Ddocker.showLogs" process-classes
}
function stop_ {
    # Stop and remove all Docker containers whose name starts with the artifactId.
    discoverArtifactId
    echo "Stopping Docker containers that supported rapid application development ..."
    docker container ls --filter name=${ARTIFACT_ID}-*
    echo "Stopping containers ..."
    docker container stop (docker container ls -q --filter name=${ARTIFACT_ID}-*)
    echo "Removing containers ..."
    docker container rm (docker container ls -aq --filter name=${ARTIFACT_ID}-*)
}
function tail_logs {
    # Follow the logs of a single container selected by name suffix,
    # e.g. `.\rad.ps1 tail platform`.
    param (
        $container
    )
    discoverArtifactId
    docker container logs -f (docker container ls -q --filter name=${ARTIFACT_ID}-${container})
}
function list {
    # List the running containers belonging to this module.
    discoverArtifactId
    docker container ls --filter name=${ARTIFACT_ID}-*
}

# Dispatch on the first script argument.
switch ($args[0]) {
    "start" {
        start_
    }
    "start_log" {
        start_log
    }
    "stop" {
        stop_
    }
    "restart" {
        stop_
        start_
    }
    "rebuild" {
        rebuild
    }
    "tail" {
        tail_logs $args[1]
    }
    "containers" {
        list
    }
    default {
        echo "Usage: .\rad.ps1 [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
    }
}
echo "Completed!"

71
community-module/rad.sh Normal file
View File

@@ -0,0 +1,71 @@
#!/bin/sh
# Rapid application development (RAD) helper for this module (POSIX shell).
# Usage: ./rad.sh [ start | start_log | stop | restart | rebuild | tail {container} | containers ]

discoverArtifactId() {
	# Resolve the Maven artifactId of this project; the sed strips any ANSI
	# color escape sequences Maven may emit into stdout.
	ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
}
rebuild() {
	# Rebuild only; does not (re)start any containers.
	echo "Rebuilding project ..."
	mvn process-test-classes
}
start() {
	# -Drad activates the RAD profile, which starts the Docker containers.
	echo "Rebuilding project and starting Docker containers to support rapid application development ..."
	mvn -Drad process-test-classes
}
start_log() {
	# Same as start, but stream container logs to the console.
	echo "Rebuilding project and starting Docker containers to support rapid application development ..."
	mvn -Drad -Ddocker.showLogs process-test-classes
}
stop() {
	# Stop and remove all Docker containers whose name starts with the artifactId.
	# The filter patterns are quoted so the shell cannot glob-expand the '*'
	# against files in the current directory, which would corrupt the filter.
	discoverArtifactId
	echo "Stopping Docker containers that supported rapid application development ..."
	docker container ls --filter "name=${ARTIFACT_ID}-*"
	echo "Stopping containers ..."
	docker container stop `docker container ls -q --filter "name=${ARTIFACT_ID}-*"`
	echo "Removing containers ..."
	docker container rm `docker container ls -aq --filter "name=${ARTIFACT_ID}-*"`
}
tail_logs() {
	# Follow the logs of a single container selected by name suffix,
	# e.g. `./rad.sh tail platform`.
	discoverArtifactId
	docker container logs -f `docker container ls -q --filter "name=${ARTIFACT_ID}-$1"`
}
list() {
	# List the running containers belonging to this module.
	discoverArtifactId
	docker container ls --filter "name=${ARTIFACT_ID}-*"
}

# Dispatch on the first script argument.
case "$1" in
	start)
		start
		;;
	start_log)
		start_log
		;;
	stop)
		stop
		;;
	restart)
		stop
		start
		;;
	rebuild)
		rebuild
		;;
	tail)
		tail_logs "$2"
		;;
	containers)
		list
		;;
	*)
		echo "Usage: ./rad.sh [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
		exit 1
esac
echo "Completed!"

View File

@@ -0,0 +1,23 @@
package com.inteligr8.alfresco.asie;

/**
 * Constants specific to the ACS Community edition of the ASIE platform
 * module, extending the shared {@link Constants}.
 *
 * Interface fields are implicitly {@code public static final}, so the
 * modifiers are omitted here.
 */
public interface CommunityConstants extends Constants {

    // Spring bean names of the caches backing shard/node discovery.
    String BEAN_SHARDSETS_CACHE = "asieShardsetsCache";
    String BEAN_NODES_CACHE = "asieNodesCache";
    String BEAN_SHARD_NODES_CACHE = "asieShardNodesCache";
    String BEAN_SHARDINST_STATE_CACHE = "asieShardInstanceStateCache";
    String BEAN_NODE_DISABLE_CACHE = "asieNodeDisabledCache";
    String BEAN_NODE_UNAVAIL_CACHE = "asieNodeUnavailableCache";
    String BEAN_SHARDINST_DISABLE_CACHE = "asieShardInstanceDisabledCache";
    String BEAN_SHARDINST_UNAVAIL_CACHE = "asieShardInstanceUnavailableCache";
    String BEAN_CORE_EXPLICIT_CACHE = "asieCoreExplicitCache";

    // Keys used with the Alfresco AttributeService for persisted ASIE state.
    String ATTR_ASIE_SHARDSET = "inteligr8.asie.shardSet";
    String ATTR_ASIE_NODE = "inteligr8.asie.node";
    String ATTR_ASIE_SHARD_NODES = "inteligr8.asie.shard.nodes";
    String ATTR_ASIE_SHARD_NODE = "inteligr8.asie.shard.node";
    String ATTR_OBJECT = "object";
    String ATTR_DISABLE = "disabled";
    String ATTR_NODES = "nodes";
}

View File

@@ -0,0 +1,50 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.List;
import java.util.Set;
import org.alfresco.repo.search.impl.parsers.CMISLexer;
import org.alfresco.repo.search.impl.parsers.CMISParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.springframework.stereotype.Component;

/**
 * A {@link QueryInspector} for the CMIS family of query languages.
 *
 * NOTE(review): value extraction is not yet implemented; only parsing is in
 * place and {@link #findRequiredPropertyValues} always throws.
 */
@Component
public class CmisQueryInspector implements QueryInspector {

    // The CMIS language identifiers this inspector claims to support.
    private Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_CMIS_ALFRESCO,
            SearchService.LANGUAGE_CMIS_STRICT,
            SearchService.LANGUAGE_INDEX_CMIS,
            SearchService.LANGUAGE_SOLR_CMIS);

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    /**
     * Not yet implemented.  The query is parsed (so syntax errors surface as
     * {@link RecognitionException}), but the resulting tree is not inspected.
     *
     * @throws UnsupportedOperationException always, after a successful parse
     */
    @Override
    public List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        // TODO implement tree inspection; tree is currently parsed and discarded
        Tree tree = this.parseCmis(query, defaultOperator);
        throw new UnsupportedOperationException();
    }

    /**
     * Parses a CMIS query into an ANTLR syntax tree.
     *
     * NOTE(review): defaultOperator is currently unused by the CMIS parser
     * invocation - confirm whether it should influence parsing.
     */
    protected Tree parseCmis(String cmisQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(cmisQuery);
        CMISLexer lexer = new CMISLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        CMISParser parser = new CMISParser(tokens);
        CommonTree tree = (CommonTree) parser.query().getTree();
        return tree;
    }
}

View File

@@ -0,0 +1,290 @@
package com.inteligr8.alfresco.asie.compute;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * A {@link QueryInspector} for the Alfresco FTS family of query languages.
 *
 * It parses a query with Alfresco's ANTLR-based FTS parser and walks the
 * syntax tree to find values that a given property is *required* to match
 * (i.e. values in conjunctive, non-fuzzy terms).
 */
@Component
public class FtsQueryInspector implements QueryInspector {

    private final Logger logger = LoggerFactory.getLogger(FtsQueryInspector.class);

    // The FTS/Lucene language identifiers this inspector supports.
    private final Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_FTS_ALFRESCO,
            SearchService.LANGUAGE_INDEX_FTS_ALFRESCO,
            SearchService.LANGUAGE_SOLR_FTS_ALFRESCO,
            SearchService.LANGUAGE_LUCENE);

    @Autowired
    private NamespaceService namespaceService;

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    /**
     * Finds values that the given property must match for the query to be
     * satisfiable: parse, drop single-term disjunction wrappers, collect
     * required (conjunctive) terms, keep only terms on the given property,
     * drop fuzzy (inexact) terms, and convert what remains to typed values.
     *
     * @return the required values; null when the query is a true disjunction
     *         (no single required value exists)
     * @throws RecognitionException if the query fails to parse
     */
    @Override
    public List<QueryValue> findRequiredPropertyValues(String ftsQuery, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        Tree tree = this.parseFts(ftsQuery, defaultOperator);
        tree = this.bypassSingleTermDisjunctions(tree);
        if (tree == null)
            return null;
        Collection<Tree> trees = this.extractRequiredTerms(tree);
        this.logger.trace("Found {} required terms in query: {}", trees.size(), ftsQuery);
        this.filterPropertyTerms(trees, property);
        this.logger.trace("Found {} required terms for property {} in query: {}", trees.size(), property, ftsQuery);
        this.filterOutFuzzyTerms(trees);
        this.logger.trace("Found {} required definitive terms for property {} in query: {}", trees.size(), property, ftsQuery);
        List<QueryValue> values = new ArrayList<>(trees.size());
        for (Tree t : trees)
            values.add(this.extractValue(t, dataTypeDef));
        return values;
    }

    /**
     * Parses an FTS query into an ANTLR syntax tree, honoring the caller's
     * default operator (AND => conjunction mode).
     */
    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(ftsQuery);
        FTSLexer lexer = new FTSLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FTSParser parser = new FTSParser(tokens);
        parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
        parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
        return tree;
    }

    /**
     * Unwraps DISJUNCTION nodes that have exactly one child (which are not
     * real disjunctions).
     *
     * @return the unwrapped subtree, or null if a genuine (multi-child)
     *         disjunction remains at the root
     */
    protected Tree bypassSingleTermDisjunctions(Tree tree) {
        while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
            tree = tree.getChild(0);
        if ("DISJUNCTION".equals(tree.getText()))
            return null;
        return tree;
    }

    /**
     * Recursively collects the terms that must hold: children of CONJUNCTION
     * nodes and DEFAULT term nodes.  Multi-child DISJUNCTION subtrees are
     * skipped since none of their branches is individually required.
     */
    protected Collection<Tree> extractRequiredTerms(Tree tree) {
        // unwrap degenerate single-child disjunctions first
        while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
            tree = tree.getChild(0);
        List<Tree> terms = new LinkedList<>();
        switch (tree.getText()) {
            case "DISJUNCTION":
                // a real disjunction contributes no required terms
                break;
            case "CONJUNCTION":
                for (int c = 0; c < tree.getChildCount(); c++) {
                    Collection<Tree> subtrees = this.extractRequiredTerms(tree.getChild(c));
                    if (subtrees == null || subtrees.isEmpty())
                        continue;
                    terms.addAll(subtrees);
                }
                break;
            case "DEFAULT":
                terms.add(tree);
                break;
            default:
                this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
        }
        return terms;
    }

    /**
     * Removes (in place) any term that is not a reference to the given
     * property, matching both the local name and any registered prefix of the
     * property's namespace.
     *
     * @return the same (mutated) collection, for chaining
     */
    protected Collection<Tree> filterPropertyTerms(Collection<Tree> trees, QName property) {
        if (trees.isEmpty())
            return trees;
        Set<String> prefixes = new HashSet<>(this.namespaceService.getPrefixes(property.getNamespaceURI()));
        if (prefixes.isEmpty()) {
            this.logger.warn("Unexpected/unsupported namespace: {}", property.getNamespaceURI());
            trees.clear();
            return trees;
        }
        Iterator<Tree> i = trees.iterator();
        while (i.hasNext()) {
            Tree tree = i.next();
            if ("DEFAULT".equals(tree.getText()))
                tree = tree.getChild(0);
            // 'skip' is the child index of the FIELD_REF node, i.e. the number
            // of value children that precede it for this term type
            int skip = -1;
            switch (tree.getText()) {
                case "TERM":
                case "PHRASE":
                case "EXACT_TERM":
                case "EXACT_PHRASE":
                    skip = 1; // skip the value child
                    break;
                case "RANGE":
                    skip = 4; // skip the inclusive, start, end, inclusive children
                    break;
                default:
            }
            if (skip >= 0) {
                Tree fieldRef = tree.getChild(skip);
                if (!"FIELD_REF".equals(fieldRef.getText())) {
                    this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
                } else if (!fieldRef.getChild(0).getText().equals(property.getLocalName())) {
                    this.logger.trace("Found but ignoring property: {}", fieldRef.getChild(0).getText());
                } else {
                    Tree prefix = fieldRef.getChild(1);
                    if (!"PREFIX".equals(prefix.getText())) {
                        this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
                    } else if (!prefixes.contains(prefix.getChild(0).getText())) {
                        this.logger.trace("Found but ignoring property: {}:{}", prefix.getChild(0).getText(), property.getLocalName());
                    } else {
                        // this will skip the remove()
                        continue;
                    }
                }
            }
            i.remove();
        }
        return trees;
    }

    /**
     * Removes (in place) terms whose match is inexact (plain TERM/PHRASE);
     * only EXACT_TERM, EXACT_PHRASE and RANGE terms pin a definitive value.
     *
     * @return the same (mutated) collection, for chaining
     */
    protected Collection<Tree> filterOutFuzzyTerms(Collection<Tree> trees) {
        if (trees.isEmpty())
            return trees;
        Iterator<Tree> i = trees.iterator();
        while (i.hasNext()) {
            Tree tree = i.next();
            if ("DEFAULT".equals(tree.getText()))
                tree = tree.getChild(0);
            switch (tree.getText()) {
                case "EXACT_TERM":
                case "EXACT_PHRASE":
                case "RANGE":
                    break;
                default:
                    i.remove();
            }
        }
        return trees;
    }

    /**
     * Converts a term node into a typed {@link QueryValue}, using the
     * property's data type local name to choose the Java representation.
     * Unrecognized types fall back to the raw string.
     */
    protected QueryValue extractValue(Tree tree, DataTypeDefinition dataTypeDef) {
        if ("DEFAULT".equals(tree.getText()))
            tree = tree.getChild(0);
        switch (tree.getText()) {
            case "RANGE":
                return this.extractRangeValue(tree, dataTypeDef);
            default:
        }
        String value = this.unquote(tree.getChild(0).getText());
        switch (dataTypeDef.getName().getLocalName()) {
            case "boolean":
                return new QuerySingleValue<Boolean>(Boolean.parseBoolean(value));
            case "double":
                return new QuerySingleValue<Double>(Double.parseDouble(value));
            case "float":
                return new QuerySingleValue<Float>(Float.parseFloat(value));
            case "int":
                return new QuerySingleValue<Integer>(Integer.parseInt(value));
            case "long":
                return new QuerySingleValue<Long>(Long.parseLong(value));
            case "date":
                return new QuerySingleValue<LocalDate>(this.evaluateAsDate(value));
            case "datetime":
                return new QuerySingleValue<LocalDateTime>(this.evaluateAsDateTime(value));
            case "period":
                return new QuerySingleValue<Period>(Period.parse(value));
            case "qname":
                return new QuerySingleValue<QName>(QName.createQName(value, this.namespaceService));
            case "noderef":
                return new QuerySingleValue<NodeRef>(new NodeRef(value));
            case "childassocref":
                return new QuerySingleValue<ChildAssociationRef>(new ChildAssociationRef(value));
            case "assocref":
                return new QuerySingleValue<AssociationRef>(new AssociationRef(value));
            case "locale":
                return new QuerySingleValue<Locale>(new Locale(value));
            default:
                return new QuerySingleValue<String>(value);
        }
    }

    /**
     * Converts a RANGE node (children: inclusivity, start, end, inclusivity)
     * into a typed {@link QueryRangeValue}.
     *
     * @throws UnsupportedOperationException for data types with no ordering
     */
    protected QueryRangeValue<?> extractRangeValue(Tree tree, DataTypeDefinition dataTypeDef) {
        boolean includeStart = "INCLUSIVE".equals(tree.getChild(0).getText());
        String start = this.unquote(tree.getChild(1).getText());
        String end = this.unquote(tree.getChild(2).getText());
        boolean includeEnd = "INCLUSIVE".equals(tree.getChild(3).getText());
        switch (dataTypeDef.getName().getLocalName()) {
            case "double":
                return new QueryRangeValue<Double>(includeStart, Double.parseDouble(start), includeEnd, Double.parseDouble(end));
            case "float":
                return new QueryRangeValue<Float>(includeStart, Float.parseFloat(start), includeEnd, Float.parseFloat(end));
            case "int":
                return new QueryRangeValue<Integer>(includeStart, Integer.parseInt(start), includeEnd, Integer.parseInt(end));
            case "long":
                return new QueryRangeValue<Long>(includeStart, Long.parseLong(start), includeEnd, Long.parseLong(end));
            case "date":
                return new QueryRangeValue<LocalDate>(includeStart, this.evaluateAsDate(start), includeEnd, this.evaluateAsDate(end));
            case "datetime":
                return new QueryRangeValue<LocalDateTime>(includeStart, this.evaluateAsDateTime(start), includeEnd, this.evaluateAsDateTime(end));
            default:
                throw new UnsupportedOperationException("The data type does not make sense for range evaluation: " + dataTypeDef.getName());
        }
    }

    /**
     * Evaluates a date literal; the keyword "now" (any case) means today.
     * NOTE(review): other FTS date keywords (e.g. "today") are not handled -
     * confirm whether they can reach this code.
     */
    protected LocalDate evaluateAsDate(String str) {
        if ("now".equalsIgnoreCase(str)) return LocalDate.now();
        else return LocalDate.parse(str);
    }

    /** Evaluates a date-time literal; the keyword "now" means this instant. */
    protected LocalDateTime evaluateAsDateTime(String str) {
        if ("now".equalsIgnoreCase(str)) return LocalDateTime.now();
        else return LocalDateTime.parse(str);
    }

    /** Strips one matching pair of surrounding single or double quotes, if present. */
    protected String unquote(String str) {
        if (str.length() < 2) return str;
        else if (str.charAt(0) == '\'' && str.charAt(str.length()-1) == '\'') return str.substring(1, str.length()-1);
        else if (str.charAt(0) == '\"' && str.charAt(str.length()-1) == '\"') return str.substring(1, str.length()-1);
        else return str;
    }
}

View File

@@ -0,0 +1,74 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.List;
import java.util.Set;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.RecognitionException;

/**
 * A query inspector parses a search query in one of its supported languages
 * and extracts the values a given property is required to match.
 */
public interface QueryInspector {

    /**
     * @return The search language identifiers this inspector can parse.
     */
    Set<String> getSupportedLanguages();

    /**
     * Finds the values the given property must match for the query to be
     * satisfiable.
     *
     * @param query A query in one of the supported languages.
     * @param defaultOperator The operator to assume between undecorated terms.
     * @param property The property whose required values are sought.
     * @param dataTypeDef The data type of that property, used to type values.
     * @return The required values; implementations may return null when no
     *         single required value exists.
     * @throws RecognitionException If the query fails to parse.
     */
    List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException;

    /** Marker for a value extracted from a query. */
    public interface QueryValue {
    }

    /** A single concrete value extracted from a query term. */
    public class QuerySingleValue<T> implements QueryValue {

        // immutable once constructed
        private final T value;

        public QuerySingleValue(T value) {
            this.value = value;
        }

        public T getValue() {
            return value;
        }

        @Override
        public String toString() {
            // String.valueOf() avoids an NPE when the wrapped value is null
            return String.valueOf(this.value);
        }
    }

    /** A bounded range of values extracted from a query range term. */
    public class QueryRangeValue<T> implements QueryValue {

        // immutable once constructed
        private final boolean includeStart;
        private final T start;
        private final boolean includeEnd;
        private final T end;

        public QueryRangeValue(boolean includeStart, T start, boolean includeEnd, T end) {
            this.includeStart = includeStart;
            this.start = start;
            this.includeEnd = includeEnd;
            this.end = end;
        }

        /** @return true if the start bound is inclusive. */
        public boolean isIncludeStart() {
            return includeStart;
        }

        /** @return true if the end bound is inclusive. */
        public boolean isIncludeEnd() {
            return includeEnd;
        }

        public T getStart() {
            return start;
        }

        public T getEnd() {
            return end;
        }
    }
}

View File

@@ -0,0 +1,32 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.service.cmr.search.SearchParameters;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Selects the {@link QueryInspector} appropriate for a query's language.
 *
 * All discovered inspectors are indexed by supported language at startup;
 * if two inspectors declare the same language, the one registered last wins.
 */
@Component
public class QueryInspectorFactory implements InitializingBean {

    @Autowired
    private List<QueryInspector> inspectors;

    // Language identifier => inspector; built once in afterPropertiesSet(),
    // read-only afterward, hence final.
    private final Map<String, QueryInspector> languageInspectorMap = new HashMap<>();

    /**
     * Indexes every registered inspector under each language it supports.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        for (QueryInspector inspector : this.inspectors) {
            for (String language : inspector.getSupportedLanguages())
                this.languageInspectorMap.put(language, inspector);
        }
    }

    /**
     * @param searchParams The search whose language determines the inspector.
     * @return The matching inspector, or null if the language is unsupported.
     */
    public QueryInspector selectQueryInspector(SearchParameters searchParams) {
        return this.languageInspectorMap.get(searchParams.getLanguage());
    }
}

View File

@@ -0,0 +1,28 @@
package com.inteligr8.alfresco.asie.provider;

import org.alfresco.repo.index.shard.ShardRegistry;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import com.inteligr8.alfresco.asie.Constants;

/**
 * Exposes a single, qualified {@link ShardRegistry} bean for ASIE to use,
 * chosen from whatever ShardRegistry beans the wider context registers.
 */
@Configuration
public class ShardRegistryProvider extends AbstractProvider<ShardRegistry> {

    /**
     * This allows for the selection of the primary or first ShardRegistry
     * registered in the Spring BeanFactory.
     *
     * @return A ShardRegistry.
     */
    @Bean(Constants.BEAN_SHARD_REGISTRY)
    @Qualifier(Constants.QUALIFIER_ASIE)
    @Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
    public ShardRegistry selectBean() {
        // getPrimary() is inherited from AbstractProvider (not visible here);
        // presumably resolves the @Primary or first matching bean - confirm.
        return this.getPrimary(ShardRegistry.class);
    }
}

View File

@@ -0,0 +1,240 @@
package com.inteligr8.alfresco.asie.service;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.alfresco.util.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.CommunityConstants;
import com.inteligr8.alfresco.asie.model.Shard;
import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.cachext.CollectionCache;
import com.inteligr8.alfresco.cachext.MultiValueCache;

/**
 * Answers discovery queries about Solr shard sets, shards, and the Solr
 * nodes hosting them, entirely from the ASIE caches (no direct Solr calls
 * are made here).
 */
@Component
public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // core name => shard set definition
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
    private SimpleCache<String, ShardSet> shardsetsCache;

    // node spec => Solr node
    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODES_CACHE)
    private SimpleCache<String, SolrHost> nodesCache;

    // shard => nodes hosting an instance of it
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
    private MultiValueCache<Shard, SolrHost> shardNodesCache;

    // shard instance => last reported state
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
    private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;

    // Availability/disablement caches; not used by the methods below,
    // presumably consumed by other service methods or collaborators.
    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
    private SimpleCache<String, QName> coreExplicitIdCache;

    /**
     * @return The shard set registered for the given core name, or null.
     */
    @Override
    public ShardSet findSetByCore(String core) {
        return this.shardsetsCache.get(core);
    }

    /**
     * Finds a cached Solr node by hostname and port.  When hostnames differ
     * textually, both are resolved via DNS and compared by address, so
     * aliases of the same host still match.  Resolutions are memoized per
     * call in a local map.
     *
     * @return The matching node, or null if none matches.
     */
    @Override
    public SolrHost findNode(String nodeHostname, int nodePort) {
        Map<String, InetAddress> resolvedAddresses = new HashMap<>();
        for (String nodeSpec : this.nodesCache.getKeys()) {
            SolrHost node = this.nodesCache.get(nodeSpec);
            if (!nodeHostname.equalsIgnoreCase(node.getHostname())) {
                if (!resolvedAddresses.containsKey(nodeHostname))
                    resolvedAddresses.put(nodeHostname, this.resolve(nodeHostname));
                InetAddress nodeAddress = resolvedAddresses.get(nodeHostname);
                this.logger.trace("Resolved: {} => {}", nodeHostname, nodeAddress);
                if (nodeAddress == null)
                    continue;
                if (!resolvedAddresses.containsKey(node.getHostname()))
                    resolvedAddresses.put(node.getHostname(), this.resolve(node.getHostname()));
                InetAddress shardInstanceAddress = resolvedAddresses.get(node.getHostname());
                this.logger.trace("Resolved: {} => {}", node.getHostname(), shardInstanceAddress);
                if (!nodeAddress.equals(shardInstanceAddress))
                    continue;
            }
            if (nodePort == node.getPort()) {
                this.logger.debug("Found node: {}", node);
                return node;
            }
        }
        return null;
    }

    /** Resolves a hostname to an address, or null if resolution fails. */
    private InetAddress resolve(String hostname) {
        try {
            return InetAddress.getByName(hostname);
        } catch (UnknownHostException uhe) {
            return null;
        }
    }

    /**
     * Maps each shard set hosted on the given node to its shard IDs and
     * their instance states on that node.
     */
    @Override
    public Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node) {
        Map<ShardSet, Map<Integer, ShardInstanceState>> response = new HashMap<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            ShardSet shardSet = this.shardsetsCache.get(shard.extractShardSetCore());
            if (this.shardNodesCache.contains(shard, node)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                // NOTE(review): state (and possibly shardSet) may be null if
                // the caches are out of sync - callers should tolerate nulls
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
                Map<Integer, ShardInstanceState> shards = response.get(shardSet);
                if (shards == null)
                    response.put(shardSet, shards = new HashMap<>());
                shards.put(shard.extractShardId(), state);
            }
        }
        return response;
    }

    /**
     * @return All shard sets whose sharding method is one of those given.
     */
    @Override
    public Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods) {
        Set<ShardSet> shardSets = new HashSet<>();
        Set<ShardMethodEnum> methods = CollectionUtils.asSet(shardMethods);
        for (String core : this.shardsetsCache.getKeys()) {
            ShardSet shardSet = this.shardsetsCache.get(core);
            if (methods.contains(shardSet.getMethod()))
                shardSets.add(shardSet);
        }
        return shardSets;
    }

    /**
     * @return All nodes hosting any shard of the given shard set.
     */
    @Override
    public Set<SolrHost> findNodes(ShardSet shardSet) {
        Set<SolrHost> nodes = new HashSet<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()))
                nodes.addAll(this.shardNodesCache.get(shard));
        }
        return nodes;
    }

    /**
     * @return All nodes hosting the given shard of the given shard set.
     */
    @Override
    public Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId) {
        Set<SolrHost> nodes = new HashSet<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()) && shardId == shard.extractShardId())
                nodes.addAll(this.shardNodesCache.get(shard));
        }
        return nodes;
    }

    /**
     * For each shard of the set, selects the node whose instance state
     * compares lowest and returns it with that state, keyed by shard ID.
     *
     * NOTE(review): "latest" is implemented as compareTo(latest) &lt; 0 -
     * confirm ShardInstanceState's natural order puts the most recent state
     * first.  Also, state may be null for an unreported instance, which
     * would NPE at state.compareTo() - verify the cache always has states
     * for every registered instance.
     */
    @Override
    public Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet) {
        Map<Integer, Pair<SolrHost, ShardInstanceState>> response = new HashMap<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
                continue;
            SolrHost latestNode = null;
            ShardInstanceState latestState = null;
            for (SolrHost node : this.shardNodesCache.get(shard)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
                if (latestState == null || state.compareTo(latestState) < 0) {
                    latestState = state;
                    latestNode = node;
                }
            }
            if (latestNode != null)
                response.put(shard.extractShardId(), new Pair<>(latestNode, latestState));
        }
        return response;
    }

    /**
     * @return One (node, state) pair per instance of the given shard; states
     *         may be null for instances with no cached state.
     *
     * NOTE(review): shardId is never compared against shard.extractShardId()
     * here, so this returns instances of ALL shards in the set - confirm
     * whether a shardId filter is missing.
     */
    @Override
    public List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId) {
        List<Pair<SolrHost, ShardInstanceState>> response = new LinkedList<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
                continue;
            for (SolrHost node : this.shardNodesCache.get(shard)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
                response.add(new Pair<>(node, state));
            }
        }
        return response;
    }

    /**
     * @return The IDs of the shards of the given set hosted on the given node.
     */
    @Override
    public Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node) {
        Set<Integer> shardIds = new HashSet<>();
        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()) && this.shardNodesCache.contains(shard, node))
                shardIds.add(shard.extractShardId());
        }
        return shardIds;
    }

    /**
     * Not yet implemented; always returns null.
     */
    @Override
    public Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node) {
        // TODO Auto-generated method stub
        return null;
    }
}

View File

@@ -0,0 +1,25 @@
package com.inteligr8.alfresco.asie.service;

import org.alfresco.service.cmr.attributes.AttributeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;

/**
 * Community implementation of the shard state SPI.
 *
 * NOTE(review): attrService is injected but unused by the visible code -
 * presumably reserved for persisted-state operations; confirm or remove.
 */
@Component
public class ShardStateService implements com.inteligr8.alfresco.asie.spi.ShardStateService {

    @Autowired
    @Qualifier(Constants.QUALIFIER_ASIE)
    private AttributeService attrService;

    @Autowired
    private SolrShardRegistry shardRegistry;

    /**
     * Clears all tracked shard state by purging the shard registry.
     */
    @Override
    public void clear() {
        this.shardRegistry.purge();
    }
}

View File

@@ -0,0 +1,628 @@
package com.inteligr8.alfresco.asie.service;
import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.OptionalInt;
import java.util.Random;
import java.util.Set;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.RecognitionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.annotation.Primary;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.CommunityConstants;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.compute.QueryInspector;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryRangeValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QuerySingleValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryValue;
import com.inteligr8.alfresco.asie.compute.QueryInspectorFactory;
import com.inteligr8.alfresco.asie.model.Shard;
import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.spi.ShardRegistry;
import com.inteligr8.alfresco.cachext.CollectionCache;
import com.inteligr8.alfresco.cachext.MultiValueCache;
@Component
@Primary
public class SolrShardRegistry extends AbstractLifecycleBean implements ShardRegistry {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private final Random random = new Random();
private final QName shardLock = QName.createQName(Constants.NAMESPACE_ASIE, "shardLock");
@Autowired
@Qualifier(Constants.QUALIFIER_ASIE)
private AttributeService attrService;
@Autowired
private NamespaceService namespaceService;
@Autowired
private DictionaryService dictionaryService;
@Autowired
private QueryInspectorFactory queryInspectorFactory;
@Autowired
private JobLockService jobLockService;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
private SimpleCache<String, ShardSet> shardsetsCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODES_CACHE)
private SimpleCache<String, SolrHost> nodesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
private MultiValueCache<Shard, SolrHost> shardNodesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
private SimpleCache<String, QName> coreExplicitIdCache;
@Value("${inteligr8.asie.registerUnknownShardDisabled}")
private boolean registerDisabled;
@Value("${inteligr8.asie.offlineIdleShardInSeconds}")
private int offlineIdleShardInSeconds;
@Value("${inteligr8.asie.forgetOfflineShardInSeconds}")
private int forgetOfflineShardInSeconds;
/**
 * On application startup, rebuilds the shared caches from the persisted attributes.
 */
@Override
protected void onBootstrap(ApplicationEvent event) {
    this.loadPersistedToCache();
}

/**
 * No shutdown work; persistence is performed incrementally while running.
 */
@Override
protected void onShutdown(ApplicationEvent event) {
}
/**
 * Loads all persisted shard registry data (shard sets, nodes, shard/node associations,
 * and shard instance states) from the AttributeService into the shared caches.  Runs
 * under a cluster-wide job lock so only one member rebuilds at a time.
 */
protected void loadPersistedToCache() {
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        // shard sets, keyed by core name
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                String core = (String) keys[1];
                if (!shardsetsCache.contains(core)) {
                    ShardSet shardSet = (ShardSet) value;
                    shardsetsCache.put(core, shardSet);
                    switch (shardSet.getMethod()) {
                        case EXPLICIT_ID:
                            // also prime the core => explicit-ID-property mapping
                            cacheExplicitShard(shardSet, false);
                            break;
                        default:
                    }
                }
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_SHARDSET);
        // nodes, keyed by their spec string; also synchronize their disabled flags
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                String nodeSpec = (String) keys[2];
                SolrHost node = (SolrHost) value;
                if (!nodesCache.contains(nodeSpec))
                    nodesCache.put(nodeSpec, node);
                if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, nodeSpec))) {
                    if (!nodeDisableCache.contains(node))
                        nodeDisableCache.add(node);
                } else if (nodeDisableCache.contains(node)) {
                    nodeDisableCache.remove(node);
                }
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT);
        // shard => node associations; persistShardNodesCache() stores the SolrHost as the
        // attribute *value* and its String spec as the last key, so the node must be read
        // from 'value' (the original cast keys[2] -- the spec String -- to SolrHost)
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                Shard shard = (Shard) keys[1];
                SolrHost node = (SolrHost) value;
                if (!shardNodesCache.contains(shard, node))
                    shardNodesCache.add(shard, node);
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_SHARD_NODES);
        // shard instance states; persisted under ATTR_ASIE_SHARD_NODE (the original
        // queried ATTR_ASIE_NODE, re-reading node attributes with mismatched casts)
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                ShardInstance shardNode = (ShardInstance) keys[2];
                ShardInstanceState state = (ShardInstanceState) value;
                if (!shardInstanceStatesCache.contains(shardNode))
                    shardInstanceStatesCache.put(shardNode, state);
                if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode))) {
                    if (!shardInstanceDisableCache.contains(shardNode))
                        shardInstanceDisableCache.add(shardNode);
                } else if (shardInstanceDisableCache.contains(shardNode)) {
                    shardInstanceDisableCache.remove(shardNode);
                }
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT);
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Caches the core => explicit-shard-ID-property mapping for an EXPLICIT_ID shard set.
 *
 * @param shardSet The shard set whose property should be cached.
 * @param overwrite If true, replace any existing mapping for the core.
 */
private void cacheExplicitShard(ShardSet shardSet, boolean overwrite) {
    if (!overwrite && this.coreExplicitIdCache.contains(shardSet.getCore()))
        return;
    QName propertyQName = QName.createQName(shardSet.getPrefixedProperty(), this.namespaceService);
    this.logger.debug("Mapping core to explicit ID: {} => {}", shardSet.getCore(), propertyQName);
    this.coreExplicitIdCache.put(shardSet.getCore(), propertyQName);
}
/**
 * Persists all shard registry caches to the AttributeService under a cluster-wide job
 * lock, allowing the caches to be rebuilt after a restart.
 */
protected void persistCache() {
    // more retries (50) than the load path: persisting should eventually win the lock
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
    try {
        this.persistShardSetCache();
        this.persistNodeCache();
        this.persistShardNodesCache();
        this.persistShardInstanceCache();
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Persists every cached shard set as an attribute keyed by its core name.
 */
private void persistShardSetCache() {
    // add anything missing
    // update anything changed
    for (String core : this.shardsetsCache.getKeys()) {
        ShardSet shardSet = this.shardsetsCache.get(core);
        this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, core);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
}
/**
 * Persists every cached node, and synchronizes the persisted per-node disabled flags
 * with the in-memory disable cache.
 */
private void persistNodeCache() {
    // add anything missing
    // update anything changed
    for (String nodeSpec : this.nodesCache.getKeys()) {
        SolrHost node = this.nodesCache.get(nodeSpec);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, nodeSpec);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
    // add anything disabled
    for (SolrHost node : this.nodeDisableCache.values())
        this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, node.getSpec());
    // remove anything not disabled
    this.attrService.getAttributes(new AttributeQueryCallback() {
        @Override
        public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
            // keys[2] is the node spec under ATTR_ASIE_NODE/ATTR_DISABLE
            SolrHost node = SolrHost.from((String) keys[2]);
            if (!nodeDisableCache.contains(node))
                attrService.removeAttribute(keys);
            return true;
        }
    }, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE);
}
/**
 * Persists every cached shard-to-node association.  Removals are not persisted; stale
 * associations are simply re-cached on the next load.
 */
private void persistShardNodesCache() {
    // add anything missing; update anything changed
    for (Shard shard : this.shardNodesCache.getKeys()) {
        for (SolrHost node : this.shardNodesCache.get(shard))
            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
    }
}
/**
 * Persists every cached shard instance state, and synchronizes the persisted per-instance
 * disabled flags with the in-memory disable cache.
 */
private void persistShardInstanceCache() {
    // add anything missing
    // update anything changed
    for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
        ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
        this.checkSetAttribute(state, shardNode);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
    // add anything disabled
    for (ShardInstance shardNode : this.shardInstanceDisableCache.values())
        this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode);
    // remove anything not disabled
    this.attrService.getAttributes(new AttributeQueryCallback() {
        @Override
        public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
            // keys[2] is the shard instance under ATTR_ASIE_SHARD_NODE/ATTR_DISABLE
            ShardInstance shardNode = (ShardInstance) keys[2];
            if (!shardInstanceDisableCache.contains(shardNode))
                attrService.removeAttribute(keys);
            return true;
        }
    }, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE);
}
/**
 * Persists a shard instance state, but only when it is newer than the one already
 * persisted.  Elsewhere in this class a smaller compareTo() result means a more recent
 * state (see the latest-state selection and registerShardState(), which replaces the
 * cached state when currentState.compareTo(state) &gt; 0).  The original used the opposite
 * condition here, skipping the update when the persisted state was older and overwriting
 * when it was newer.
 *
 * @param state The in-cache state to persist.
 * @param shardNode The shard instance the state belongs to.
 */
private void checkSetAttribute(ShardInstanceState state, ShardInstance shardNode) {
    ShardInstanceState currentState = (ShardInstanceState) this.attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
    if (currentState == null || currentState.compareTo(state) > 0) {
        // nothing persisted yet, or the persisted state is older (compares greater)
        if (currentState != null)
            this.logger.debug("The persisted state was old; updating: {}: {} => {}", shardNode, currentState, state);
        this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
    }
    // otherwise the persisted state is at least as recent; leave it untouched
}
/**
 * Persists an attribute value if it is missing or differs from what is already persisted,
 * warning when an unexpected change is detected.
 *
 * @param value The value to persist.
 * @param keys The attribute keys.
 */
private void checkSetAttribute(Serializable value, Serializable... keys) {
    Serializable currentValue = this.attrService.getAttribute(keys);
    if (currentValue != null) {
        if (currentValue.equals(value))
            return;
        // Arrays.toString renders the key values; logging the array directly would only
        // print its identity hash (arrays do not override toString())
        this.logger.warn("The attribute value unexpectedly changed: {}: {} => {}", Arrays.toString(keys), currentValue, value);
    }
    this.attrService.setAttribute(value, keys);
}
/**
 * Registers (or refreshes) a shard instance reported by a Solr node: caches and persists
 * its shard set, node, shard/node association, and state.  The whole update runs under a
 * cluster-wide job lock.
 *
 * @param shardNodeState The state reported by the Solr node (Alfresco model).
 */
@Override
public void registerShardState(ShardState shardNodeState) {
    ShardSet shardSet = ShardSet.from(shardNodeState.getShardInstance().getShard().getFloc(), shardNodeState);
    Shard shard = Shard.from(shardSet, shardNodeState.getShardInstance().getShard().getInstance());
    SolrHost node = SolrHost.from(shardNodeState.getShardInstance());
    ShardInstance shardNode = ShardInstance.from(shard, node);
    ShardInstanceState state = ShardInstanceState.from(shardNodeState);
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        // cache-if-absent, then persist-if-changed, for each registry element
        if (!this.shardsetsCache.contains(shardSet.getCore()))
            this.shardsetsCache.put(shardSet.getCore(), shardSet);
        this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, shardSet.getCore());
        if (!this.nodesCache.contains(node.getSpec()))
            this.nodesCache.put(node.getSpec(), node);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, node.getSpec());
        if (!this.shardNodesCache.contains(shard, node))
            this.shardNodesCache.add(shard, node);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
        // replace the cached state only when the incoming one is newer (compares smaller)
        ShardInstanceState currentState = this.shardInstanceStatesCache.get(shardNode);
        if (currentState == null || currentState.compareTo(state) > 0)
            this.shardInstanceStatesCache.put(shardNode, state);
        this.checkSetAttribute(state, shardNode);
        // optionally quarantine newly-seen shard instances until explicitly enabled
        if (this.registerDisabled && !this.shardInstanceDisableCache.contains(shardNode))
            this.shardInstanceDisableCache.add(shardNode);
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Unregisters a shard instance: drops its cached state and flags, and removes its
 * persisted shard/node association.
 *
 * NOTE(review): this also removes the node from the node-level disable/unavailable
 * caches even though that node may still host other shards -- confirm this is intended.
 *
 * @param shardInstance The shard instance to forget (Alfresco model).
 */
@Override
public void unregisterShardInstance(org.alfresco.repo.index.shard.ShardInstance shardInstance) {
    ShardSet shardSet = ShardSet.from(shardInstance.getShard().getFloc(), null);
    Shard shard = Shard.from(shardSet, shardInstance.getShard().getInstance());
    SolrHost node = SolrHost.from(shardInstance);
    ShardInstance shardNode = ShardInstance.from(shard, node);
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        this.shardInstanceStatesCache.remove(shardNode);
        this.shardInstanceDisableCache.remove(shardNode);
        this.shardInstanceUnavailableCache.remove(shardNode);
        this.nodeDisableCache.remove(node);
        this.nodeUnavailableCache.remove(node);
        this.attrService.removeAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Builds the Alfresco view of the registry: floc =&gt; shard =&gt; live shard states.
 * Disabled or unavailable nodes and shard instances are excluded.
 *
 * @return A map of flocs to their shards and each shard's known instance states; never null.
 */
@Override
public Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> getFlocs() {
    Map<String, Floc> flocs = new HashMap<>();
    Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> response = new HashMap<>();
    for (Shard shard : this.shardNodesCache.getKeys()) {
        String core = shard.extractShardSetCore();
        ShardSet shardSet = this.shardsetsCache.get(core);
        if (shardSet == null) {
            // the shard set may have expired from the cache; cannot build a floc for it
            continue;
        }
        // the original inverted this null check: on first encounter the floc was never
        // built/remembered and 'shards' was looked up with a null key
        Floc floc = flocs.get(core);
        Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>> shards;
        if (floc == null) {
            floc = shardSet.toAlfrescoModel();
            flocs.put(core, floc);
            shards = new HashMap<>();
        } else {
            shards = response.get(floc);
            if (shards == null)
                shards = new HashMap<>();
        }
        org.alfresco.repo.index.shard.Shard shard_ = shard.toAlfrescoModel(floc);
        Set<ShardState> states = shards.get(shard_);
        if (states == null)
            states = new HashSet<>();
        for (SolrHost node : this.shardNodesCache.get(shard)) {
            if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node)) {
                this.logger.debug("Excluding node as it is disabled or considered unavailable: {}", node);
                continue;
            }
            ShardInstance shardNode = ShardInstance.from(shard, node);
            if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode)) {
                this.logger.debug("Excluding shard node as it is disabled or considered unavailable: {}", shardNode);
                continue;
            }
            ShardInstanceState shardNodeState = this.shardInstanceStatesCache.get(shardNode);
            if (shardNodeState == null)
                continue; // state may have expired independently of the association
            states.add(shardNodeState.toAlfrescoModel(shardNode.toAlfrescoModel(shard_)));
        }
        if (!states.isEmpty())
            shards.put(shard_, states);
        if (!shards.isEmpty())
            response.put(floc, shards);
    }
    return response;
}
/**
 * Removes every node, shard, shard set, state, and flag from both the shared caches and
 * the persisted attributes.  Runs under a cluster-wide job lock.
 */
@Override
public void purge() {
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
    try {
        this.logger.info("Removing all nodes/shards from the shard registry");
        this.shardsetsCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARDSET);
        this.nodesCache.clear();
        this.nodeDisableCache.clear();
        this.nodeUnavailableCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_NODE);
        this.shardNodesCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODES);
        this.shardInstanceStatesCache.clear();
        this.shardInstanceDisableCache.clear();
        this.shardInstanceUnavailableCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODE);
        this.coreExplicitIdCache.clear();
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Ages out shard instances based on the last time they reported state: instances idle
 * longer than {@code offlineIdleShardInSeconds} are marked unavailable; instances idle
 * longer than {@code forgetOfflineShardInSeconds} are forgotten entirely.  Disabled
 * instances and nodes are left alone.
 */
@Override
public void purgeAgedOutShards() {
    OffsetDateTime onlineExpired = OffsetDateTime.now().minusSeconds(this.offlineIdleShardInSeconds);
    OffsetDateTime offlineExpired = OffsetDateTime.now().minusSeconds(this.forgetOfflineShardInSeconds);
    for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
        ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
        if (state == null)
            continue; // entry evicted between getKeys() and get(); nothing to age out
        SolrHost node = shardNode.extractNode();
        if (this.shardInstanceDisableCache.contains(shardNode)) {
            this.logger.debug("Ignoring disabled shard instance during purgeAgedOutShards()");
        } else if (this.nodeDisableCache.contains(node)) {
            this.logger.debug("Ignoring disabled node during purgeAgedOutShards()");
        } else if (state.getLastUpdated().isBefore(offlineExpired)) {
            // idle long enough to be forgotten entirely
            this.shardInstanceStatesCache.remove(shardNode);
            if (this.shardInstanceUnavailableCache.remove(shardNode)) {
                this.logger.info("Forgetting about already offline shard: {}", shardNode);
            } else if (this.nodeUnavailableCache.remove(node)) {
                this.logger.info("Forgetting about already offline shard: {}", shardNode);
            } else {
                this.logger.warn("Forgetting about online shard: {}", shardNode);
            }
        } else if (state.getLastUpdated().isBefore(onlineExpired)) {
            // idle long enough to be taken out of query rotation
            this.logger.warn("Taking shard offline: {}", shardNode);
            this.shardInstanceUnavailableCache.add(shardNode);
        }
    }
}
/**
 * @param coreName A core name.
 * @return The property whose value provides the explicit shard ID for the core; null if
 *         the core has no cached EXPLICIT_ID mapping.
 */
@Override
public QName getExplicitIdProperty(String coreName) {
    return this.coreExplicitIdCache.get(coreName);
}
/**
 * Lists the known shard identifiers for a core.
 *
 * @param coreName A core name.
 * @return The shard identifiers; never null; empty when the core is unknown.
 */
@Override
public Set<Integer> getShardInstanceList(String coreName) {
    ShardSet shardSet = this.shardsetsCache.get(coreName);
    if (shardSet == null)
        return Collections.emptySet();
    String core = shardSet.getCore();
    Set<Integer> ids = new HashSet<>();
    for (Shard shard : this.shardNodesCache.getKeys())
        if (core.equals(shard.extractShardSetCore()))
            ids.add(shard.extractShardId());
    return ids;
}
/**
 * Not supported by this registry implementation (DB_ID_RANGE-style lookup).
 *
 * @throws UnsupportedOperationException always.
 */
@Override
public OptionalInt getShardInstanceByTransactionTimestamp(String coreId, long txnTimestamp) {
    throw new UnsupportedOperationException();
}
/**
 * Optimizes a search by narrowing it to a subset of shard instances, when some shard
 * set's sharding method allows it for the given query.  All known shard sets are
 * evaluated and the slice touching the fewest shards wins.
 *
 * @param searchParameters The search to optimize.
 * @return The selected shard instances; an empty list when the query is null; null when
 *         no shard set could optimize the query.
 *         NOTE(review): callers presumably treat a null return as "query all shards" --
 *         confirm against the ShardRegistry contract.
 */
@Override
public List<org.alfresco.repo.index.shard.ShardInstance> getIndexSlice(SearchParameters searchParameters) {
    if (searchParameters.getQuery() == null)
        return Collections.emptyList();
    List<org.alfresco.repo.index.shard.ShardInstance> bestShards = null;
    for (String shardSetSpec : this.shardsetsCache.getKeys()) {
        ShardSet shardSet = this.shardsetsCache.get(shardSetSpec);
        // null shardIds means this shard set offers no optimization for the query
        Set<Integer> shardIds = this.getIndexSlice(searchParameters, shardSet);
        if (shardIds == null)
            continue;
        List<org.alfresco.repo.index.shard.ShardInstance> shards = this.selectRandomNodes(shardSet, shardIds);
        // keep the slice that touches the fewest shards
        if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
            bestShards = shards;
        // a single-shard slice cannot be beaten; stop searching
        if (bestShards != null && bestShards.size() == 1)
            break;
    }
    return bestShards;
}
/**
 * Determines the shard IDs of the given shard set required by the search, when the
 * set's sharding method supports such an optimization.
 *
 * @param searchParameters The search to inspect.
 * @param shardSet The candidate shard set.
 * @return The required shard IDs; null when no optimization is possible (unsupported
 *         method, or an unparseable query).
 */
protected Set<Integer> getIndexSlice(SearchParameters searchParameters, ShardSet shardSet) {
    try {
        switch (shardSet.getMethod()) {
            case EXPLICIT_ID:
                return this.getExplicitIdIndexSlice(searchParameters, shardSet);
            default:
                // no optimization available
                return null;
        }
    } catch (RecognitionException re) {
        this.logger.debug("Failed to parse the query: " + searchParameters.getQuery(), re);
        // no optimization available
        return null;
    }
}
/**
 * Determines which shard IDs of an EXPLICIT_ID shard set are required by the query's
 * constraints on the set's explicit shard ID property.
 *
 * @param searchParameters The search to inspect.
 * @param shardSet The EXPLICIT_ID shard set.
 * @return The required shard IDs; null when no optimization is possible.
 * @throws RecognitionException If the query cannot be parsed.
 */
protected Set<Integer> getExplicitIdIndexSlice(SearchParameters searchParameters, ShardSet shardSet) throws RecognitionException {
    this.logger.trace("Found {} shard set, which is the highest priority", shardSet.getMethod());
    QueryInspector inspector = this.queryInspectorFactory.selectQueryInspector(searchParameters);
    if (inspector == null) {
        this.logger.debug("The search is using an unsupported query language; unable to optimize for {}: {}", shardSet.getMethod(), searchParameters.getLanguage());
        return null;
    }
    String property = shardSet.getPrefixedProperty();
    QName propertyQName = QName.createQName(property, this.namespaceService);
    this.logger.trace("Will attempt to see if search has a required constraint on explicit shard ID property: {}", propertyQName);
    // guard against a property missing from the data dictionary (original would NPE)
    org.alfresco.service.cmr.dictionary.PropertyDefinition propDef = this.dictionaryService.getProperty(propertyQName);
    if (propDef == null) {
        this.logger.debug("The explicit shard ID property is not defined in the dictionary: {}", propertyQName);
        return null;
    }
    DataTypeDefinition dtdef = propDef.getDataType();
    Set<Integer> shardIds = new HashSet<>();
    List<QueryValue> values = inspector.findRequiredPropertyValues(searchParameters.getQuery(), searchParameters.getDefaultOperator(), propertyQName, dtdef);
    this.logger.trace("Found {} matching terms query: {}: {}", values.size(), propertyQName, searchParameters.getQuery());
    for (QueryValue value : values) {
        if (value instanceof QuerySingleValue<?>) {
            @SuppressWarnings("unchecked")
            Number num = ((QuerySingleValue<? extends Number>) value).getValue();
            shardIds.add(num.intValue());
        } else if (value instanceof QueryRangeValue<?>) {
            @SuppressWarnings("unchecked")
            QueryRangeValue<? extends Number> num = (QueryRangeValue<? extends Number>) value;
            int start = num.getStart().intValue();
            if (!num.isIncludeStart())
                start++;
            // the original read getStart() here too, collapsing every range to at most one ID
            int end = num.getEnd().intValue();
            if (!num.isIncludeEnd())
                end--;
            for (int shardId = start; shardId <= end; shardId++)
                shardIds.add(shardId);
        }
    }
    if (shardIds.isEmpty()) {
        this.logger.trace("The {} shard set cannot be used to optimize the query", shardSet.getMethod());
        return null;
    }
    this.logger.debug("The {} shard set was used to optimize the query to use only shards: {}", shardSet.getMethod(), shardIds);
    return shardIds;
}
/**
 * Selects one random available node for each requested shard ID.
 *
 * @param shardSet The shard set the IDs belong to.
 * @param shardIds The required shard IDs.
 * @return One shard instance per requested shard; empty when any required shard has no
 *         available node (the optimization cannot be used without covering every shard).
 */
protected List<org.alfresco.repo.index.shard.ShardInstance> selectRandomNodes(ShardSet shardSet, Collection<Integer> shardIds) {
    List<org.alfresco.repo.index.shard.ShardInstance> shardNodes = new LinkedList<>();
    for (Integer shardId : shardIds) {
        Shard shard = Shard.from(shardSet, shardId);
        Collection<SolrHost> nodes = this.shardNodesCache.get(shard);
        if (nodes == null) {
            // unknown shard: the slice cannot cover every required shard
            return Collections.emptyList();
        }
        List<SolrHost> availableNodes = new LinkedList<>();
        for (SolrHost node : nodes) {
            if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node))
                continue;
            ShardInstance shardNode = ShardInstance.from(shard, node);
            if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode))
                continue;
            availableNodes.add(node);
        }
        if (availableNodes.isEmpty()) {
            // the original called random.nextInt(0) here, throwing IllegalArgumentException
            this.logger.debug("A required shard has no available nodes; not using this shard set: {}", shard);
            return Collections.emptyList();
        }
        SolrHost randomNode = availableNodes.get(this.random.nextInt(availableNodes.size()));
        shardNodes.add(ShardInstance.from(shard, randomNode).toAlfrescoModel(shard.toAlfrescoModel(shardSet.toAlfrescoModel())));
    }
    return shardNodes;
}
}

View File

@@ -0,0 +1,79 @@
package com.inteligr8.alfresco.asie.util;
import java.util.Comparator;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import com.inteligr8.alfresco.asie.model.ShardSet;
/**
 * Orders shard sets by how attractive they are for query optimization: first by sharding
 * method (explicit-ID methods best, then property/date, then ACL-based, then the rest),
 * then by shard count (fewer shards preferred; an unknown count is worst).
 *
 * The original nested-switch implementation violated the Comparator contract: two
 * distinct methods in the same tier (e.g. EXPLICIT_ID vs EXPLICIT_ID_FALLBACK_LRIS)
 * compared as -1 in both argument orders, breaking antisymmetry.  Ranking each method
 * and comparing ranks fixes that while preserving the tier ordering.
 */
public class ShardSetSearchComparator implements Comparator<ShardSet> {

    @Override
    public int compare(ShardSet ss1, ShardSet ss2) {
        int compare = Integer.compare(this.rank(ss1.getMethod()), this.rank(ss2.getMethod()));
        if (compare != 0)
            return compare;
        return this.compare(ss1.getShards(), ss2.getShards());
    }

    /**
     * @return The preference tier of a sharding method; lower is better.
     */
    private int rank(ShardMethodEnum method) {
        switch (method) {
            case EXPLICIT_ID:
            case EXPLICIT_ID_FALLBACK_LRIS:
                return 0;
            case PROPERTY:
            case DATE:
                return 1;
            case ACL_ID:
            case MOD_ACL_ID:
                return 2;
            default:
                return 3;
        }
    }

    /**
     * Compares shard counts; the larger the count, the more shards may need to be
     * queried, so smaller counts are preferred.  A null count (e.g. DB_ID_RANGE) is
     * treated as the worst (unlimited).
     */
    private int compare(Short shards1, Short shards2) {
        if (shards1 == null && shards2 == null) {
            return 0;
        } else if (shards1 == null) {
            return 1;
        } else if (shards2 == null) {
            return -1;
        } else {
            return shards1.compareTo(shards2);
        }
    }
}

View File

@@ -0,0 +1,109 @@
inteligr8.asie.registerUnknownShardDisabled=false
inteligr8.asie.offlineIdleShardInSeconds=120
inteligr8.asie.forgetOfflineShardInSeconds=86400
# we don't want items expiring out of the following caches
# an eviction policy of NONE disables the maxItems limits
# Overrides of alfresco-repository.jar/alfresco/caches.properties
cache.asieShardsetsSharedCache.tx.maxItems=65536
cache.asieShardsetsSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardsetsSharedCache.maxItems=65536
cache.asieShardsetsSharedCache.timeToLiveSeconds=0
cache.asieShardsetsSharedCache.maxIdleSeconds=0
cache.asieShardsetsSharedCache.cluster.type=fully-distributed
cache.asieShardsetsSharedCache.backup-count=1
cache.asieShardsetsSharedCache.eviction-policy=NONE
cache.asieShardsetsSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardsetsSharedCache.readBackupData=false
cache.asieNodesSharedCache.tx.maxItems=65536
cache.asieNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodesSharedCache.maxItems=65536
cache.asieNodesSharedCache.timeToLiveSeconds=0
cache.asieNodesSharedCache.maxIdleSeconds=0
cache.asieNodesSharedCache.cluster.type=fully-distributed
cache.asieNodesSharedCache.backup-count=1
cache.asieNodesSharedCache.eviction-policy=NONE
cache.asieNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodesSharedCache.readBackupData=false
cache.asieShardNodesSharedCache.tx.maxItems=65536
cache.asieShardNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardNodesSharedCache.maxItems=65536
cache.asieShardNodesSharedCache.timeToLiveSeconds=0
cache.asieShardNodesSharedCache.maxIdleSeconds=0
cache.asieShardNodesSharedCache.cluster.type=fully-distributed
cache.asieShardNodesSharedCache.backup-count=1
cache.asieShardNodesSharedCache.eviction-policy=NONE
cache.asieShardNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardNodesSharedCache.readBackupData=false
cache.asieShardInstanceStateSharedCache.tx.maxItems=65536
cache.asieShardInstanceStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceStateSharedCache.maxItems=65536
cache.asieShardInstanceStateSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceStateSharedCache.maxIdleSeconds=0
cache.asieShardInstanceStateSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceStateSharedCache.backup-count=1
cache.asieShardInstanceStateSharedCache.eviction-policy=NONE
cache.asieShardInstanceStateSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceStateSharedCache.readBackupData=false
cache.asieNodeDisabledSharedCache.tx.maxItems=65536
cache.asieNodeDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodeDisabledSharedCache.maxItems=65536
cache.asieNodeDisabledSharedCache.timeToLiveSeconds=0
cache.asieNodeDisabledSharedCache.maxIdleSeconds=0
cache.asieNodeDisabledSharedCache.cluster.type=fully-distributed
cache.asieNodeDisabledSharedCache.backup-count=1
cache.asieNodeDisabledSharedCache.eviction-policy=NONE
cache.asieNodeDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodeDisabledSharedCache.readBackupData=false
cache.asieNodeUnavailableSharedCache.tx.maxItems=65536
cache.asieNodeUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodeUnavailableSharedCache.maxItems=65536
cache.asieNodeUnavailableSharedCache.timeToLiveSeconds=0
cache.asieNodeUnavailableSharedCache.maxIdleSeconds=0
cache.asieNodeUnavailableSharedCache.cluster.type=fully-distributed
cache.asieNodeUnavailableSharedCache.backup-count=1
cache.asieNodeUnavailableSharedCache.eviction-policy=NONE
cache.asieNodeUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodeUnavailableSharedCache.readBackupData=false
cache.asieShardInstanceDisabledSharedCache.tx.maxItems=65536
cache.asieShardInstanceDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceDisabledSharedCache.maxItems=65536
cache.asieShardInstanceDisabledSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceDisabledSharedCache.maxIdleSeconds=0
cache.asieShardInstanceDisabledSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceDisabledSharedCache.backup-count=1
cache.asieShardInstanceDisabledSharedCache.eviction-policy=NONE
cache.asieShardInstanceDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceDisabledSharedCache.readBackupData=false
cache.asieShardInstanceUnavailableSharedCache.tx.maxItems=65536
cache.asieShardInstanceUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceUnavailableSharedCache.maxItems=65536
cache.asieShardInstanceUnavailableSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceUnavailableSharedCache.maxIdleSeconds=0
cache.asieShardInstanceUnavailableSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceUnavailableSharedCache.backup-count=1
cache.asieShardInstanceUnavailableSharedCache.eviction-policy=NONE
cache.asieShardInstanceUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceUnavailableSharedCache.readBackupData=false
cache.asieCoreExplicitSharedCache.tx.maxItems=65536
cache.asieCoreExplicitSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieCoreExplicitSharedCache.maxItems=65536
cache.asieCoreExplicitSharedCache.timeToLiveSeconds=0
cache.asieCoreExplicitSharedCache.maxIdleSeconds=0
cache.asieCoreExplicitSharedCache.cluster.type=fully-distributed
cache.asieCoreExplicitSharedCache.backup-count=1
cache.asieCoreExplicitSharedCache.eviction-policy=NONE
cache.asieCoreExplicitSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieCoreExplicitSharedCache.readBackupData=false

View File

@@ -0,0 +1,50 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Use this file for beans to be loaded in whatever order Alfresco/Spring decides -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
<!-- Shared caches backing the ASIE shard registry.  Each bean wraps a cache.* block
     configured in this module's properties file.  Collection caches take the concrete
     collection class to instantiate as a second constructor argument. -->
<!-- core name => shard set -->
<bean name="asieShardsetsCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieShardsetsSharedCache" />
</bean>
<!-- node spec => Solr node -->
<bean name="asieNodesCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieNodesSharedCache" />
</bean>
<!-- shard => nodes hosting it (multi-valued) -->
<bean name="asieShardNodesCache" factory-bean="cacheFactory" factory-method="createMultiValueCache">
<constructor-arg value="cache.asieShardNodesSharedCache" />
</bean>
<!-- shard instance => last known state -->
<bean name="asieShardInstanceStateCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieShardInstanceStateSharedCache" />
</bean>
<!-- sets of administratively disabled / automatically unavailable nodes -->
<bean name="asieNodeDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieNodeDisabledSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieNodeUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieNodeUnavailableSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<!-- sets of administratively disabled / automatically unavailable shard instances -->
<bean name="asieShardInstanceDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieShardInstanceDisabledSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieShardInstanceUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieShardInstanceUnavailableSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<!-- core name => explicit shard ID property (QName) -->
<bean name="asieCoreExplicitCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieCoreExplicitSharedCache" />
</bean>
</beans>

View File

@@ -0,0 +1,11 @@
module.id=com_inteligr8_alfresco_${project.artifactId}
module.aliases=
module.title=${project.name}
module.description=${project.description}
module.version=${module.version}
module.repo.version.min=23.0
# NOTE: pinning explicit versions on these module dependencies caused repeated
# install problems, most likely due to the modules' non-standard versioning
# scheme; wildcard ('*') version matches are used below instead.
module.depends.com.inteligr8.alfresco.cachext-platform-module=*
module.depends.com.inteligr8.alfresco.cxf-jaxrs-platform-module=*

View File

@@ -0,0 +1,146 @@
package com.inteligr8.alfresco.asie;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
/**
 * Unit tests that document how Alfresco's FTS (full-text search) grammar
 * parses various query constraints into an ANTLR tree.  Each test parses a
 * query string and walks the expected tree shape:
 * DISJUNCTION &gt; CONJUNCTION &gt; DEFAULT &gt; term/phrase/range &gt; FIELD_REF &gt; PREFIX.
 */
public class QueryConstraintUnitTest {

    // Jackson mapper with a Tree serializer registered in init(); available for
    // dumping parsed ANTLR trees as JSON while debugging these tests.
    private static final ObjectMapper om = new ObjectMapper();

    /** Register the ANTLR {@link Tree} JSON serializer once for the class. */
    @BeforeClass
    public static void init() {
        SimpleModule module = new SimpleModule();
        module.addSerializer(Tree.class, new TreeSerializer());
        om.registerModule(module);
    }

    /** An exact-match term (=@prefix:field:term) parses to EXACT_TERM. */
    @Test
    public void testSingleExactTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    /** A plain term (@prefix:field:term) parses to TERM (fuzzy match). */
    @Test
    public void testSingleFuzzyTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    /** A single-quoted value parses to PHRASE; the quotes are kept in the token text. */
    @Test
    public void testSingleFuzzyString() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:'testing'", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "'testing'");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    /** A double-quoted value also parses to PHRASE, even without the '@' marker. */
    @Test
    public void testSingleFuzzyStringDoubleQuotes() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("cm:title:\"testing\"", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "\"testing\"");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    /** A range [a TO b&gt; parses to RANGE with INCLUSIVE/EXCLUSIVE boundary markers. */
    @Test
    public void testSingleRange() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:created:[NOW TO '2025-01-01T00:00:00'>", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "RANGE", "INCLUSIVE", "NOW", "'2025-01-01T00:00:00'", "EXCLUSIVE");
        tree = this.validateChildren(tree, "FIELD_REF", "created");
        this.validate(tree, "PREFIX", "cm");
    }

    /** Two AND-ed constraints become two children of the CONJUNCTION node. */
    @Test
    public void testTwoTerms() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test1 AND @cm:author:test2", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        List<Tree> trees = this.validateChildren(tree, "CONJUNCTION", 2);
        tree = trees.get(0);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test1");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
        tree = trees.get(1);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test2");
        tree = this.validateChildren(tree, "FIELD_REF", "author");
        this.validate(tree, "PREFIX", "cm");
    }

    /**
     * Assert a leaf node: its text matches and its children are exactly the
     * given extra values (no further descent).
     */
    protected void validate(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(extraValues.length, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
    }

    /**
     * Assert an interior node: its text matches, its first children are the
     * given extra values, and exactly one additional child follows, which is
     * returned for further descent.
     */
    protected Tree validateChildren(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        // extra values plus the single descend-into child
        Assert.assertEquals(extraValues.length + 1, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
        return tree.getChild(extraValues.length);
    }

    /**
     * Assert an interior node with exactly {@code count} children and return
     * them all (used when a node legitimately fans out, e.g. CONJUNCTION).
     */
    protected List<Tree> validateChildren(Tree tree, String text, int count) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(count, tree.getChildCount());
        List<Tree> children = new ArrayList<>();
        for (int c = 0; c < tree.getChildCount(); c++)
            children.add(tree.getChild(c));
        return children;
    }

    /**
     * Parse an FTS query string with Alfresco's ANTLR lexer/parser and return
     * the resulting tree.  The default operator controls whether bare terms
     * are implicitly AND-ed (conjunction) or OR-ed (disjunction).
     */
    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException, JsonProcessingException {
        CharStream cs = new ANTLRStringStream(ftsQuery);
        FTSLexer lexer = new FTSLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FTSParser parser = new FTSParser(tokens);
        parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
        parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
        return tree;
    }
}

View File

@@ -0,0 +1,44 @@
package com.inteligr8.alfresco.asie;
import java.io.IOException;
import org.antlr.runtime.tree.Tree;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
/**
 * Jackson serializer that renders an ANTLR {@link Tree} as a nested JSON
 * object: an optional "text" string plus an optional "children" array of
 * recursively serialized subtrees.
 */
public class TreeSerializer extends StdSerializer<Tree> {

    private static final long serialVersionUID = -2714782538361726878L;

    /** Default constructor; serializes {@link Tree} instances. */
    public TreeSerializer() {
        super(Tree.class);
    }

    /** Constructor for an explicit handled class. */
    public TreeSerializer(Class<Tree> type) {
        super(type);
    }

    /** Constructor for an explicit handled Jackson type. */
    public TreeSerializer(JavaType type) {
        super(type);
    }

    @Override
    public void serialize(Tree tree, JsonGenerator generator, SerializerProvider provider) throws IOException {
        generator.writeStartObject();

        // Only emit the "text" field when the node actually has token text.
        String nodeText = tree.getText();
        if (nodeText != null)
            generator.writeStringField("text", nodeText);

        // Only emit "children" when there is at least one child; each child is
        // serialized through Jackson, re-entering this serializer recursively.
        int childCount = tree.getChildCount();
        if (childCount > 0) {
            generator.writeArrayFieldStart("children");
            for (int i = 0; i < childCount; i++)
                generator.writeObject(tree.getChild(i));
            generator.writeEndArray();
        }

        generator.writeEndObject();
    }
}

Binary file not shown.

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.1-SNAPSHOT</version>
<version>1.2-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
@@ -16,8 +16,12 @@
<name>ASIE Platform Module for ACS Enterprise</name>
<properties>
<alfresco.sdk.version>5.2.0</alfresco.sdk.version>
<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
<alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
<tomcat-rad.version>10-2.1</tomcat-rad.version>
<beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
</properties>
<dependencyManagement>
@@ -80,7 +84,7 @@
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>cxf-jaxrs-platform-module</artifactId>
<version>1.3.1-acs-v23.3</version>
<scope>provided</scope>
<type>amp</type>
</dependency>
<!-- Including for testing purposes only -->

View File

@@ -1,22 +1,22 @@
#!/bin/sh
discoverArtifactId() {
ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate`
ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
}
rebuild() {
echo "Rebuilding project ..."
mvn process-classes
mvn process-test-classes
}
start() {
echo "Rebuilding project and starting Docker containers to support rapid application development ..."
mvn -Drad process-classes
mvn -Drad process-test-classes
}
start_log() {
echo "Rebuilding project and starting Docker containers to support rapid application development ..."
mvn -Drad -Ddocker.showLogs process-classes
mvn -Drad -Ddocker.showLogs process-test-classes
}
stop() {

View File

@@ -9,6 +9,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
@@ -23,10 +24,11 @@ import org.springframework.http.HttpStatus;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.NodeParameterSet;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.rest.AbstractAsieNodeWebScript;
import com.inteligr8.alfresco.asie.service.ShardBackupService;
import com.inteligr8.alfresco.asie.spi.ShardStateService;
import com.inteligr8.alfresco.asie.rest.model.NodeParameterSet;
import com.inteligr8.alfresco.asie.spi.ShardBackupService;
import com.inteligr8.alfresco.asie.enterprise.service.ShardStateService;
import com.inteligr8.solr.model.CoreMetadata;
import com.inteligr8.solr.model.core.StatusRequest;
import com.inteligr8.solr.model.core.StatusResponse;
@@ -99,7 +101,9 @@ public abstract class AbstractUnregisterNodeWebScript<T extends NodeParameterSet
}
} finally {
this.sss.remove(matchingCore.getKey());
this.sbs.forget(shardNode);
Shard shard = shardNode.getShardInstance().getShard();
this.sbs.forget(ShardSet.from(shard.getFloc(), shardNode), shard.getInstance());
}
}
} finally {

View File

@@ -4,8 +4,8 @@ import org.alfresco.repo.index.shard.ShardState;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.model.NodeShardParameterSet;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.rest.model.NodeShardParameterSet;
@Component(value = "webscript.com.inteligr8.alfresco.asie.nodeShard.delete")
public class UnloadNodeShardWebScript extends AbstractUnregisterNodeWebScript<NodeShardParameterSet> {
@@ -20,7 +20,7 @@ public class UnloadNodeShardWebScript extends AbstractUnregisterNodeWebScript<No
@Override
protected boolean matches(NodeShardParameterSet params, ShardState shardState) {
if (!params.getShardSet().isFor(shardState))
if (!params.getShardSet().contains(shardState))
return false;
if (params.getShardId() != shardState.getShardInstance().getShard().getInstance())
return false;

View File

@@ -3,7 +3,7 @@ package com.inteligr8.alfresco.asie.enterprise.rest;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.model.NodeParameterSet;
import com.inteligr8.alfresco.asie.rest.model.NodeParameterSet;
@Component(value = "webscript.com.inteligr8.alfresco.asie.node.delete")
public class UnloadNodeWebScript extends AbstractUnregisterNodeWebScript<NodeParameterSet> {

View File

@@ -0,0 +1,404 @@
package com.inteligr8.alfresco.asie.enterprise.service;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardRegistry;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.alfresco.util.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
@Component
/**
 * Enterprise implementation of the ASIE shard-discovery SPI, backed entirely
 * by Alfresco's {@link ShardRegistry}.  Every lookup walks the registry's
 * floc -&gt; shard -&gt; shard-state structure; nothing is cached here.
 */
public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    @Qualifier(Constants.QUALIFIER_ASIE)
    private ShardRegistry shardRegistry;

    /**
     * Find the shard set whose core name matches the given core.
     *
     * @param core A Solr core name.
     * @return The matching shard set, or {@code null} if none matches.
     */
    @Override
    public ShardSet findSetByCore(String core) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return null;
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            for (Entry<Shard, Set<ShardState>> flocShard : floc.getValue().entrySet()) {
                for (ShardState shardState : flocShard.getValue()) {
                    ShardSet shardSet = ShardSet.from(floc.getKey(), shardState);
                    if (shardSet.getCore().equals(core))
                        return shardSet;
                }
            }
        }

        return null;
    }

    /**
     * Find a registered Solr node by hostname and port.  When the hostname
     * does not match textually, both hostnames are resolved via DNS and
     * compared by address, so aliases of the same host still match.
     *
     * @param nodeHostname A hostname or alias.
     * @param nodePort A Solr port.
     * @return The matching node, or {@code null} if none matches.
     */
    @Override
    public SolrHost findNode(String nodeHostname, int nodePort) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return null;
        this.logger.trace("Found {} shard sets", flocs.size());

        // memoize DNS lookups so each hostname is resolved at most once
        Map<String, InetAddress> resolvedAddresses = new HashMap<>();

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            for (Entry<Shard, Set<ShardState>> flocShard : floc.getValue().entrySet()) {
                for (ShardState shardState : flocShard.getValue()) {
                    ShardInstance shardInstance = shardState.getShardInstance();
                    if (!nodeHostname.equalsIgnoreCase(shardInstance.getHostName())) {
                        if (!resolvedAddresses.containsKey(nodeHostname))
                            resolvedAddresses.put(nodeHostname, this.resolve(nodeHostname));
                        InetAddress nodeAddress = resolvedAddresses.get(nodeHostname);
                        this.logger.trace("Resolved: {} => {}", nodeHostname, nodeAddress);
                        if (nodeAddress == null)
                            continue;

                        if (!resolvedAddresses.containsKey(shardInstance.getHostName()))
                            resolvedAddresses.put(shardInstance.getHostName(), this.resolve(shardInstance.getHostName()));
                        InetAddress shardInstanceAddress = resolvedAddresses.get(shardInstance.getHostName());
                        this.logger.trace("Resolved: {} => {}", shardInstance.getHostName(), shardInstanceAddress);
                        if (!nodeAddress.equals(shardInstanceAddress))
                            continue;
                    }

                    if (nodePort == shardInstance.getPort()) {
                        SolrHost node = SolrHost.from(shardInstance);
                        this.logger.debug("Found node: {}", node);
                        return node;
                    }
                }
            }
        }

        return null;
    }

    /**
     * Find all shard states hosted by the given node, across all shard sets.
     *
     * @param node A Solr node.
     * @return A map of shard set to (shard ID to state); empty if none.
     */
    @Override
    public Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptyMap();
        this.logger.trace("Found {} shard sets", flocs.size());

        Map<ShardSet, Map<Integer, ShardInstanceState>> setShardStates = new HashMap<>();
        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet())
            setShardStates.putAll(this.findByNode(node, floc.getKey(), floc.getValue()));
        return setShardStates;
    }

    /** Collect the given node's shard states within a single floc. */
    private Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node, Floc floc, Map<Shard, Set<ShardState>> shards) {
        // all states in one floc share the same shard set, so derive it once
        // from the first state encountered
        ShardSet shardSet = null;

        Map<ShardSet, Map<Integer, ShardInstanceState>> setShardStates = new HashMap<>();
        int shardStateCount = 0;

        for (Entry<Shard, Set<ShardState>> flocShard : shards.entrySet()) {
            for (ShardState shardState : flocShard.getValue()) {
                if (shardSet == null)
                    shardSet = ShardSet.from(floc, shardState);

                ShardInstance shardInstance = shardState.getShardInstance();
                if (node.equals(SolrHost.from(shardInstance))) {
                    Map<Integer, ShardInstanceState> shardStates = setShardStates.get(shardSet);
                    if (shardStates == null)
                        setShardStates.put(shardSet, shardStates = new HashMap<>());
                    shardStates.put(flocShard.getKey().getInstance(), ShardInstanceState.from(shardState));
                    shardStateCount++;
                }
            }
        }

        this.logger.debug("Found {} shard states for node: {}", shardStateCount, node);
        return setShardStates;
    }

    /**
     * Find all shard sets that use any of the given sharding methods.
     *
     * @param shardMethods One or more sharding methods.
     * @return The matching shard sets; empty if none.
     */
    @Override
    public Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptySet();
        this.logger.trace("Found {} shard sets", flocs.size());

        Set<ShardMethodEnum> shardMethodSet = CollectionUtils.asSet(shardMethods);

        Set<ShardSet> shardSets = new HashSet<>();
        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            if (shardMethodSet.contains(floc.getKey().getShardMethod())) {
                ShardState shardState = this.extractAnyShardState(floc.getValue());
                shardSets.add(ShardSet.from(floc.getKey(), shardState));
            }
        }

        // FIX: previously logged flocs.size() (all sets), not the matched count
        this.logger.debug("Found {} shard sets of methods: {}", shardSets.size(), shardMethods);
        return shardSets;
    }

    /**
     * Find all nodes hosting any shard of the given shard set.
     *
     * @param shardSet A shard set.
     * @return The nodes; empty if the set is unknown or has no nodes.
     */
    @Override
    public Set<SolrHost> findNodes(ShardSet shardSet) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptySet();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            Set<SolrHost> nodes = this.findNodes(shardSet, null, floc.getKey(), floc.getValue());
            if (nodes != null) {
                this.logger.debug("Found {} nodes for set: {}", nodes.size(), shardSet);
                return nodes;
            }
        }

        this.logger.debug("Found {} nodes for set: {}", 0, shardSet);
        return Collections.emptySet();
    }

    /**
     * Find all nodes hosting a specific shard of the given shard set.
     *
     * @param shardSet A shard set.
     * @param shardId A shard ID within the set.
     * @return The nodes; empty if none.
     */
    @Override
    public Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptySet();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            Set<SolrHost> nodes = this.findNodes(shardSet, shardId, floc.getKey(), floc.getValue());
            if (nodes != null) {
                this.logger.debug("Found {} nodes for shard #{} in set: {}", nodes.size(), shardId, shardSet);
                return nodes;
            }
        }

        this.logger.debug("Found {} nodes for shard #{} in set: {}", 0, shardId, shardSet);
        return Collections.emptySet();
    }

    /**
     * Collect the nodes in one floc, optionally restricted to one shard ID.
     * Returns {@code null} (not empty) when the floc is not the requested
     * shard set, so callers can continue scanning other flocs.
     */
    private Set<SolrHost> findNodes(ShardSet shardSet, Integer shardId, Floc floc, Map<Shard, Set<ShardState>> shards) {
        Set<SolrHost> nodes = new HashSet<>();

        // the set-membership check is done once, on the first state seen;
        // all states in a floc belong to the same shard set
        boolean checked = false;

        for (Entry<Shard, Set<ShardState>> shard : shards.entrySet()) {
            if (shardId == null || shardId.intValue() == shard.getKey().getInstance()) {
                for (ShardState shardState : shard.getValue()) {
                    if (!checked && !shardSet.equals(ShardSet.from(floc, shardState)))
                        return null;
                    checked = true;

                    ShardInstance shardInstance = shardState.getShardInstance();
                    SolrHost node = SolrHost.from(shardInstance);
                    nodes.add(node);
                }
            }
        }

        return nodes;
    }

    /**
     * For each shard of the given set, find the node with the "latest" state
     * as ordered by {@code ShardedNodeShardStateComparator}.
     *
     * @param shardSet A shard set.
     * @return A map of shard ID to (node, state); empty if the set is unknown.
     */
    @Override
    public Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptyMap();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            Map<Integer, Pair<SolrHost, ShardInstanceState>> shardNodeStates = this.findLatestNodeStates(shardSet, floc.getKey(), floc.getValue());
            if (shardNodeStates != null) {
                this.logger.debug("Found {} shard node states for set: {}", shardNodeStates.size(), shardSet);
                return shardNodeStates;
            }
        }

        this.logger.debug("Found {} shard node states for set: {}", 0, shardSet);
        return Collections.emptyMap();
    }

    /** Pick, per shard, the best (node, state) pair in one floc; null if wrong set. */
    private Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet, Floc floc, Map<Shard, Set<ShardState>> shards) {
        Map<Integer, Pair<SolrHost, ShardInstanceState>> shardNodeStates = new HashMap<>();
        boolean checked = false;

        // NOTE(review): the comparator is applied with a possibly-null second
        // argument (first pair per shard); assumes it orders null last — confirm
        com.inteligr8.alfresco.asie.spi.ShardDiscoveryService.ShardedNodeShardStateComparator comparator = new com.inteligr8.alfresco.asie.spi.ShardDiscoveryService.ShardedNodeShardStateComparator();

        for (Entry<Shard, Set<ShardState>> shard : shards.entrySet()) {
            int shardId = shard.getKey().getInstance();

            for (ShardState shardState : shard.getValue()) {
                if (!checked && !shardSet.equals(ShardSet.from(floc, shardState)))
                    return null;
                checked = true;

                ShardInstance shardInstance = shardState.getShardInstance();
                SolrHost node = SolrHost.from(shardInstance);
                ShardInstanceState nodeShardState = ShardInstanceState.from(shardState);
                Pair<SolrHost, ShardInstanceState> pair = new Pair<>(node, nodeShardState);
                if (comparator.compare(pair, shardNodeStates.get(shardId)) < 0)
                    shardNodeStates.put(shardId, pair);
            }
        }

        return shardNodeStates;
    }

    /**
     * Find every (node, state) pair for a specific shard of the given set.
     *
     * @param shardSet A shard set.
     * @param shardId A shard ID within the set.
     * @return The pairs; empty if none.
     */
    @Override
    public List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptyList();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            List<Pair<SolrHost, ShardInstanceState>> nodeStates = this.findNodeStates(shardSet, shardId, floc.getKey(), floc.getValue());
            if (nodeStates != null) {
                this.logger.debug("Found {} node states for shard #{} in set: {}", nodeStates.size(), shardId, shardSet);
                return nodeStates;
            }
        }

        this.logger.debug("Found {} node states for shard #{} in set: {}", 0, shardId, shardSet);
        return Collections.emptyList();
    }

    /** Collect (node, state) pairs in one floc, optionally by shard ID; null if wrong set. */
    private List<Pair<SolrHost, ShardInstanceState>> findNodeStates(ShardSet shardSet, Integer shardId, Floc floc, Map<Shard, Set<ShardState>> shards) {
        List<Pair<SolrHost, ShardInstanceState>> nodeStates = new LinkedList<>();
        boolean checked = false;

        for (Entry<Shard, Set<ShardState>> shard : shards.entrySet()) {
            if (shardId == null || shardId.intValue() == shard.getKey().getInstance()) {
                for (ShardState shardState : shard.getValue()) {
                    if (!checked && !shardSet.equals(ShardSet.from(floc, shardState)))
                        return null;
                    checked = true;

                    ShardInstance shardInstance = shardState.getShardInstance();
                    SolrHost node = SolrHost.from(shardInstance);
                    ShardInstanceState nodeShardState = ShardInstanceState.from(shardState);
                    nodeStates.add(new Pair<>(node, nodeShardState));
                }
            }
        }

        return nodeStates;
    }

    /**
     * Find the IDs of all shards of the given set hosted by the given node.
     *
     * @param shardSet A shard set.
     * @param node A Solr node.
     * @return The shard IDs; empty if none.
     */
    @Override
    public Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptySet();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            Set<Integer> shardIds = this.findIdsByNode(shardSet, node, floc.getKey(), floc.getValue());
            if (shardIds != null) {
                this.logger.debug("Found {} shards for node '{}' in set: {}", shardIds.size(), node, shardSet);
                return shardIds;
            }
        }

        // FIX: message previously said "node states"; this method finds shards
        this.logger.debug("Found {} shards for node '{}' in set: {}", 0, node, shardSet);
        return Collections.emptySet();
    }

    /** Collect shard IDs hosted by the node in one floc; null if wrong set. */
    private Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node, Floc floc, Map<Shard, Set<ShardState>> shards) {
        Set<Integer> shardIds = new HashSet<>();
        boolean checked = false;

        for (Entry<Shard, Set<ShardState>> shard : shards.entrySet()) {
            for (ShardState shardState : shard.getValue()) {
                if (!checked && !shardSet.equals(ShardSet.from(floc, shardState)))
                    return null;
                checked = true;

                ShardInstance shardInstance = shardState.getShardInstance();
                if (node.equals(SolrHost.from(shardInstance)))
                    shardIds.add(shard.getKey().getInstance());
            }
        }

        return shardIds;
    }

    /**
     * Find the states of all shards of the given set hosted by the given node.
     *
     * @param shardSet A shard set.
     * @param node A Solr node.
     * @return A map of shard ID to state; empty if none.
     */
    @Override
    public Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node) {
        Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
        if (flocs.isEmpty())
            return Collections.emptyMap();
        this.logger.trace("Found {} shard sets", flocs.size());

        for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
            Map<Integer, ShardInstanceState> shardStates = this.findStatesByNode(shardSet, node, floc.getKey(), floc.getValue());
            if (shardStates != null) {
                this.logger.debug("Found {} shard states for node '{}' in set: {}", shardStates.size(), node, shardSet);
                return shardStates;
            }
        }

        this.logger.debug("Found {} shard states for node '{}' in set: {}", 0, node, shardSet);
        return Collections.emptyMap();
    }

    /** Collect shard ID to state for the node in one floc; null if wrong set. */
    private Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node, Floc floc, Map<Shard, Set<ShardState>> shards) {
        Map<Integer, ShardInstanceState> shardStates = new HashMap<>();
        boolean checked = false;

        for (Entry<Shard, Set<ShardState>> shard : shards.entrySet()) {
            for (ShardState shardState : shard.getValue()) {
                if (!checked && !shardSet.equals(ShardSet.from(floc, shardState)))
                    return null;
                checked = true;

                ShardInstance shardInstance = shardState.getShardInstance();
                if (node.equals(SolrHost.from(shardInstance)))
                    shardStates.put(shard.getKey().getInstance(), ShardInstanceState.from(shardState));
            }
        }

        return shardStates;
    }

    /** Return any one shard state from the floc's shards, or null if there are none. */
    private ShardState extractAnyShardState(Map<Shard, Set<ShardState>> shards) {
        if (shards.isEmpty())
            return null;

        for (Set<ShardState> shardStates : shards.values())
            for (ShardState shardState : shardStates)
                return shardState;
        return null;
    }

    /** Resolve a hostname to an address, returning null instead of throwing on failure. */
    private InetAddress resolve(String hostname) {
        try {
            return InetAddress.getByName(hostname);
        } catch (UnknownHostException uhe) {
            return null;
        }
    }
}

View File

@@ -34,7 +34,8 @@ public class ShardStateService implements com.inteligr8.alfresco.asie.spi.ShardS
@Autowired
@Qualifier(Constants.BEAN_SHARD_GUID_CACHE)
private SimpleCache<ShardInstance, String> shardToGuidCache;
@Override
public void clear() {
this.logger.info("Removing all nodes/shards from the shard registry");
@@ -76,7 +77,7 @@ public class ShardStateService implements com.inteligr8.alfresco.asie.spi.ShardS
this.shardToGuidCache.remove(shardState.getShardInstance());
}
}
public void iterate(AttributeQueryCallback callback) {
this.attrService.getAttributes(callback, EnterpriseConstants.ATTR_SHARD_STATE);
}

View File

@@ -5,7 +5,7 @@
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.1-SNAPSHOT</version>
<version>1.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>ASIE Platform Module Parent</name>
@@ -72,6 +72,7 @@
<module>asie-api</module>
<module>shared</module>
<module>enterprise-module</module>
<module>community-module</module>
</modules>
<profiles>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.1-SNAPSHOT</version>
<version>1.2-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
@@ -16,7 +16,7 @@
<name>ASIE Shared Library for Platform Modules</name>
<properties>
<alfresco.sdk.version>5.2.0</alfresco.sdk.version>
<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
</properties>

View File

@@ -6,20 +6,15 @@ public interface Constants {
static final String QUALIFIER_ASIE = "asie";
// OOTB
// defined OOTB
static final String BEAN_SHARD_STATE_CACHE = "shardStateCache";
static final String BEAN_SHARD_GUID_CACHE = "shardToGuidCache";
static final String BEAN_OFFILINE_SHARD_STATE_CACHE = "offlineShardStateCache";
static final String BEAN_CORE_EXPLICIT_CACHE = "coreExplicitIdCache";
static final String BEAN_SHARD_REGISTRY = "asie.ShardRegistry";
static final String BEAN_OBJECT_MAPPER = "asie.ObjectMapper";
static final String BEAN_ATTRIBUTE_SERVICE = "asie.AttributeService";
static final String BEAN_SHARD_REGISTRY = "asie.ShardRegistry";
static final String ATTR_ASIE = "inteligr8.asie";
static final String ATTR_ASIE_NODE_SHARD = "inteligr8.asie.nodeShard";
static final String ATTR_STATE = "state";
static final String ATTR_ONLINE = "online";
static final String ATTR_UNLOADED = "unloadedNode.cores";
}

View File

@@ -0,0 +1,48 @@
package com.inteligr8.alfresco.asie;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
/**
 * Converts a String into an arbitrary type by trying, in order: direct
 * assignment (when the target accepts a String), a single-String-argument
 * constructor, then the static factory methods {@code from(String)} and
 * {@code valueOf(String)} (the latter covers all enums).
 */
public class SimpleCaster {

    /**
     * Transform the given string into the requested type.
     *
     * @param str The source string; {@code null} passes through as {@code null}.
     * @param returnType The target type.
     * @return The transformed value, or {@code null} when {@code str} is {@code null}.
     * @throws IllegalArgumentException When no conversion strategy applies.
     */
    public static <T> T transform(String str, Class<T> returnType) {
        if (str == null)
            return null;

        // a String is directly assignable; no conversion required
        if (returnType.isAssignableFrom(String.class)) {
            @SuppressWarnings("unchecked")
            T t = (T) str;
            return t;
        }

        // strategy 1: a public single-String-argument constructor
        try {
            Constructor<T> constructor = returnType.getConstructor(String.class);
            return constructor.newInstance(str);
        } catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException e) {
            // suppress; fall through to the static factory methods
        }

        // strategy 2: static factory methods, in preference order
        for (String staticMethod : Arrays.asList("from", "valueOf")) {
            try {
                return invoke(returnType, staticMethod, str);
            } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
                // suppress; try the next factory method
            }
        }

        throw new IllegalArgumentException("Unable to transform a String into type: " + returnType.getName());
    }

    /**
     * Invoke a static factory method taking a single String and returning the
     * target type (or a subtype).
     *
     * @throws NoSuchMethodException When the method does not exist, is not
     *         static, or does not return the target type.
     */
    private static <T> T invoke(Class<T> returnType, String staticMethodName, Object... arguments) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        Method method = returnType.getDeclaredMethod(staticMethodName, String.class);
        // FIX: invoking an instance method with a null target would throw an
        // uncaught NullPointerException; reject non-static methods up front
        if (!Modifier.isStatic(method.getModifiers()))
            throw new NoSuchMethodException("static " + returnType.getName() + "." + staticMethodName + "(String)");
        if (!returnType.isAssignableFrom(method.getReturnType()))
            throw new NoSuchMethodException();
        @SuppressWarnings("unchecked")
        T t = (T) method.invoke(null, arguments);
        return t;
    }
}

View File

@@ -1,56 +0,0 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
import org.alfresco.repo.index.shard.ShardInstance;
/**
 * A serializable identity wrapper around an Alfresco {@link ShardInstance},
 * identifying the Solr node (host, port, base path) rather than the shard.
 * Equality and hashing are based solely on the derived identifier.
 */
public class Node implements Serializable {

    private static final long serialVersionUID = -8834744746109388928L;

    private final String id;
    private final ShardInstance shardNode;

    /** Wrap a shard instance, precomputing the "host:port/path" identifier. */
    public Node(ShardInstance shardNode) {
        this.shardNode = shardNode;
        this.id = this.getHostname() + ":" + this.getPort() + this.getPath();
    }

    /** @return The precomputed "host:port/path" identifier. */
    public String getId() {
        return this.id;
    }

    /** @return The node's hostname. */
    public String getHostname() {
        return this.shardNode.getHostName();
    }

    /** @return The node's port. */
    public int getPort() {
        return this.shardNode.getPort();
    }

    /**
     * @return The node's base URL path, with the trailing core segment
     *         stripped (baseUrl points at the shard; we want the node).
     */
    public String getPath() {
        String baseUrl = this.shardNode.getBaseUrl();
        int lastSlash = baseUrl.lastIndexOf('/');
        return baseUrl.substring(0, lastSlash);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Node)
            return this.id.equals(((Node) obj).id);
        return false;
    }

    @Override
    public int hashCode() {
        return this.id.hashCode();
    }

    @Override
    public String toString() {
        return this.id;
    }
}

View File

@@ -0,0 +1,36 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
/**
 * A Solr node paired with a sliding expiration.  The expiry window restarts
 * each time {@link #reset()} is called.
 */
public class PersistedNode implements Serializable {

    private static final long serialVersionUID = 4105196543023419818L;

    private final SolrHost node;
    // the expiry window length, in milliseconds
    private final long persistMillis;
    // absolute epoch time (ms) at which this entry expires
    private long expireTimeMillis;

    /**
     * @param node The node to persist.
     * @param persistMinutes The expiry window, in minutes.
     */
    public PersistedNode(SolrHost node, int persistMinutes) {
        this.node = node;
        this.persistMillis = persistMinutes * 60L * 1000L;
        this.reset();
    }

    /** Restart the expiry window from now. */
    public void reset() {
        this.expireTimeMillis = System.currentTimeMillis() + this.persistMillis;
    }

    /** @return true once the expiry window has elapsed. */
    public boolean isExpired() {
        return this.expireTimeMillis < System.currentTimeMillis();
    }

    /** @return The persisted node. */
    public SolrHost getNode() {
        return this.node;
    }

    @Override
    public String toString() {
        // FIX: previously computed (now - expireTime), which is negative until
        // expiry; "expires in" is the time remaining, i.e. (expireTime - now)
        return "node: " + this.node + "; expires in: " + (this.expireTimeMillis - System.currentTimeMillis()) + " ms";
    }
}

View File

@@ -1,5 +0,0 @@
package com.inteligr8.alfresco.asie.model;
public interface RequestParameterSet {
}

View File

@@ -0,0 +1,68 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
import org.alfresco.repo.index.shard.Floc;
public class Shard implements Serializable {

    private static final long serialVersionUID = 5683743181748541736L;

    public static Shard from(ShardSet shardSet, int shardId) {
        return new Shard(shardSet, shardId);
    }

    public static Shard from(String spec) {
        return new Shard(spec);
    }

    /** Format: {shard-set-core}~{shard-id}. */
    private final String spec;

    protected Shard(ShardSet shardSet, int shardId) {
        this.spec = shardSet.getCore() + "~" + shardId;
    }

    protected Shard(String spec) {
        this.spec = spec;
    }

    /**
     * Builds the equivalent Alfresco shard model, attached to the given floc.
     *
     * @param floc The Alfresco floc the shard belongs to.
     * @return A new Alfresco shard with this shard's numeric identifier.
     */
    public org.alfresco.repo.index.shard.Shard toAlfrescoModel(Floc floc) {
        org.alfresco.repo.index.shard.Shard alfShard = new org.alfresco.repo.index.shard.Shard();
        alfShard.setFloc(floc);
        alfShard.setInstance(this.extractShardId());
        return alfShard;
    }

    public String getSpec() {
        return spec;
    }

    /**
     * @return The shard set core name: everything before the '~' delimiter.
     */
    public String extractShardSetCore() {
        int delim = this.spec.indexOf('~');
        return this.spec.substring(0, delim);
    }

    /**
     * @return The numeric shard identifier: everything after the '~' delimiter.
     */
    public int extractShardId() {
        int delim = this.spec.indexOf('~');
        return Integer.parseInt(this.spec.substring(delim + 1));
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Shard)
            return this.spec.equals(((Shard) obj).spec);
        return false;
    }

    @Override
    public int hashCode() {
        return this.spec.hashCode();
    }

    @Override
    public String toString() {
        return this.spec;
    }

}

View File

@@ -0,0 +1,64 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
public class ShardInstance implements Serializable {

    private static final long serialVersionUID = 7455521296197234581L;

    public static ShardInstance from(Shard shard, SolrHost node) {
        return new ShardInstance(shard, node);
    }

    /**
     * Format: {shard-spec}~{node-spec}, i.e. {core}~{shardId}~{hostname}:{port}{path}.
     * The shard spec itself contains a '~', so the node spec is everything after
     * the LAST '~' (assumes hostnames and paths contain no '~').
     */
    private final String spec;

    protected ShardInstance(Shard shard, SolrHost node) {
        this.spec = shard.getSpec() + "~" + node.getSpec();
    }

    /**
     * Rebuilds the Alfresco shard-instance model for this shard/node pairing.
     *
     * @param shard The Alfresco shard (carrying its floc) this instance belongs to.
     * @return A new Alfresco shard instance addressing the node and the per-shard core.
     */
    public org.alfresco.repo.index.shard.ShardInstance toAlfrescoModel(org.alfresco.repo.index.shard.Shard shard) {
        SolrHost node = this.extractNode();
        String core = shard.getFloc().getPropertyBag().get("coreName");
        org.alfresco.repo.index.shard.ShardInstance shardInstance = new org.alfresco.repo.index.shard.ShardInstance();
        shardInstance.setHostName(node.getHostname());
        shardInstance.setPort(node.getPort());
        // each shard is indexed in its own core, named {coreName}-{shardId}
        shardInstance.setBaseUrl(node.getPath() + "/" + core + "-" + shard.getInstance());
        shardInstance.setShard(shard);
        return shardInstance;
    }

    public String getSpec() {
        return spec;
    }

    /**
     * @return The shard ({core}~{shardId}) portion of this spec.
     */
    public Shard extractShard() {
        // the shard spec contains a '~' of its own ({core}~{shardId}), so splitting on the
        // first '~' would truncate it to just the core; split on the last one instead
        int pos = this.spec.lastIndexOf('~');
        return Shard.from(this.spec.substring(0, pos));
    }

    /**
     * @return The Solr node ({hostname}:{port}{path}) portion of this spec.
     */
    public SolrHost extractNode() {
        // see extractShard(): the first '~' falls inside the shard spec, so use the last one
        int pos = this.spec.lastIndexOf('~');
        return SolrHost.from(this.spec.substring(pos+1));
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ShardInstance))
            return false;
        ShardInstance shard = (ShardInstance) obj;
        return this.spec.equals(shard.spec);
    }

    @Override
    public int hashCode() {
        return this.spec.hashCode();
    }

    @Override
    public String toString() {
        return this.spec;
    }

}

View File

@@ -0,0 +1,102 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Map.Entry;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardState;
import org.apache.commons.lang3.builder.HashCodeBuilder;
public class ShardInstanceState implements Serializable, Comparable<ShardInstanceState> {

    private static final long serialVersionUID = 2893797002689889478L;

    /**
     * Snapshots the mutable Alfresco {@link ShardState} into this immutable form.
     */
    public static ShardInstanceState from(ShardState shardState) {
        return new ShardInstanceState(shardState);
    }

    // all timestamps are converted from epoch milliseconds to UTC OffsetDateTime
    private final OffsetDateTime lastUpdated;
    private final long lastIndexedChangeSetId;
    private final OffsetDateTime lastIndexedChangeSetTime;
    private final long lastIndexedTxId;
    private final OffsetDateTime lastIndexedTxTime;
    // lazily computed hash; transient so it is recomputed after deserialization
    private transient Integer hash = null;

    /**
     * Copies the indexing progress (change set and transaction ids/times) and the
     * last-updated time out of the Alfresco shard state.
     */
    private ShardInstanceState(ShardState shardState) {
        this.lastUpdated = Instant.ofEpochMilli(shardState.getLastUpdated()).atOffset(ZoneOffset.UTC);
        this.lastIndexedChangeSetId = shardState.getLastIndexedChangeSetId();
        this.lastIndexedChangeSetTime = Instant.ofEpochMilli(shardState.getLastIndexedChangeSetCommitTime()).atOffset(ZoneOffset.UTC);
        this.lastIndexedTxId = shardState.getLastIndexedTxId();
        this.lastIndexedTxTime = Instant.ofEpochMilli(shardState.getLastIndexedTxCommitTime()).atOffset(ZoneOffset.UTC);
    }

    /**
     * Rebuilds an Alfresco {@link ShardState} from this snapshot.
     *
     * Copies all "shard."-prefixed properties from the floc's property bag into the
     * state's property bag, and derives the per-shard core name as
     * {coreName}-{shardId} when the floc declares a "coreName".
     *
     * @param shardInstance The Alfresco shard instance (with shard and floc) to attach.
     * @return A new Alfresco shard state; timestamps are converted back to epoch milliseconds.
     */
    public ShardState toAlfrescoModel(ShardInstance shardInstance) {
        ShardState state = new ShardState();
        state.setLastIndexedChangeSetCommitTime(this.lastIndexedChangeSetTime.toInstant().toEpochMilli());
        state.setLastIndexedChangeSetId(this.lastIndexedChangeSetId);
        state.setLastIndexedTxCommitTime(this.lastIndexedTxTime.toInstant().toEpochMilli());
        state.setLastIndexedTxId(this.lastIndexedTxId);
        state.setLastUpdated(this.lastUpdated.toInstant().toEpochMilli());
        state.setShardInstance(shardInstance);
        for (Entry<String, String> prop : shardInstance.getShard().getFloc().getPropertyBag().entrySet())
            if (prop.getKey().startsWith("shard."))
                state.getPropertyBag().put(prop.getKey(), prop.getValue());
        String core = shardInstance.getShard().getFloc().getPropertyBag().get("coreName");
        if (core != null)
            state.getPropertyBag().put("coreName", core + "-" + shardInstance.getShard().getInstance());
        return state;
    }

    public OffsetDateTime getLastUpdated() {
        return lastUpdated;
    }

    public long getLastIndexedChangeSetId() {
        return lastIndexedChangeSetId;
    }

    public OffsetDateTime getLastIndexedChangeSetTime() {
        return lastIndexedChangeSetTime;
    }

    public long getLastIndexedTxId() {
        return lastIndexedTxId;
    }

    public OffsetDateTime getLastIndexedTxTime() {
        return lastIndexedTxTime;
    }

    /**
     * Orders most recently updated first (descending lastUpdated).
     *
     * NOTE(review): this ordering is inconsistent with equals(), which compares
     * only the indexed change-set/transaction ids — confirm no sorted-set usage
     * depends on consistency.
     */
    @Override
    public int compareTo(ShardInstanceState o) {
        return -this.lastUpdated.compareTo(o.lastUpdated);
    }

    /**
     * Two states are equal when they reflect the same indexing progress
     * (change set id and transaction id); timestamps are ignored.
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ShardInstanceState))
            return false;
        ShardInstanceState snss = (ShardInstanceState) obj;
        return this.lastIndexedChangeSetId == snss.lastIndexedChangeSetId &&
                this.lastIndexedTxId == snss.lastIndexedTxId;
    }

    /**
     * Hashes the same fields equals() compares; cached after first computation.
     */
    @Override
    public int hashCode() {
        if (this.hash == null) {
            this.hash = new HashCodeBuilder()
                    .append(this.lastIndexedTxId)
                    .append(this.lastIndexedChangeSetId)
                    .build();
        }
        return this.hash.intValue();
    }
}

View File

@@ -1,56 +1,194 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardState;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.util.collections.CollectionUtils;
import org.apache.commons.collections4.map.CompositeMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import com.inteligr8.alfresco.asie.SimpleCaster;
public class ShardSet implements Serializable {
private static final long serialVersionUID = -8891094367429601316L;
/**
* Examples:
*
* MOD_ACL_ID
* ACL_ID
* DB_ID
* DB_ID_RANGE;range:0-20000
* DATE;key:cm:created
* DATE;key:cm:created;date.grouping:3
* PROPERTY;key:cm:created;regex:^\d{4}
*/
private static final Pattern shardSetPattern = Pattern.compile("([A-Z]+)(;fulltext)?(;([a-z]+):([^;]+))?(;([a-z]+):([^;]+))?");
private static final String DEFAULT_SOLR_TEMPLATE = "rerank";
private final ShardMethodEnum method;
private final boolean hasContent;
private final Map<String, String> config;
private transient Integer hash;
public ShardSet(Floc floc, ShardState anyShardNode) {
this.method = floc.getShardMethod();
this.hasContent = floc.hasContent();
this.config = (floc.getPropertyBag().isEmpty() && anyShardNode != null) ? anyShardNode.getPropertyBag() : floc.getPropertyBag();
public static ShardSet from(Floc floc, ShardState anyShardState) {
return new ShardSet(floc, anyShardState);
}
public ShardSet(String shardSetSpec) {
Matcher matcher = shardSetPattern.matcher(shardSetSpec);
if (!matcher.find())
throw new IllegalArgumentException("The shard set '" + shardSetSpec + "' is not properly formatted");
this.method = ShardMethodEnum.valueOf(matcher.group(1));
this.hasContent = ";fulltext".equals(matcher.group(2));
this.config = new HashMap<>();
for (int g = 3; g < matcher.groupCount(); g += 3)
if (matcher.group(g) != null)
this.config.put("shard." + matcher.group(g+1), matcher.group(g+2));
public static ShardSet from(String coreName, String spec) {
return new ShardSet(coreName, spec);
}
private final String core;
private final ShardMethodEnum method;
private final boolean hasContent;
private final String template;
private final Set<StoreRef> storeRefs;
private final Short shards;
private final Pair<Long, Long> range;
private final Byte dateGrouping;
private final String prefixedProperty;
private final Pattern regex;
private transient String spec;
private ShardSet(Floc floc, ShardState anyShardNode) {
String shardCoreName = anyShardNode.getPropertyBag().get("coreName");
int lastDash = shardCoreName.lastIndexOf('-');
this.core = shardCoreName.substring(0, lastDash);
this.method = floc.getShardMethod();
this.hasContent = floc.hasContent();
this.template = floc.getTemplate();
this.storeRefs = floc.getStoreRefs();
CompositeMap<String, String> propbag = new CompositeMap<>(floc.getPropertyBag(), anyShardNode.getPropertyBag());
Short shards = null;
Pair<Long, Long> range = null;
Byte dateGrouping = null;
String prefixedProperty = null;
String regex = null;
switch (this.method) {
case DB_ID_RANGE:
range = this.strToRange(propbag.get("shard.range"));
break;
case DATE:
dateGrouping = SimpleCaster.transform(propbag.get("shard.date.grouping"), Byte.class);
case PROPERTY:
case EXPLICIT_ID:
prefixedProperty = StringUtils.trimToNull(propbag.get("shard.key"));
regex = StringUtils.trimToNull(propbag.get("shard.regex"));
default:
shards = (short) floc.getNumberOfShards();
}
this.range = range;
this.shards = shards;
this.dateGrouping = dateGrouping;
this.prefixedProperty = prefixedProperty;
this.regex = regex == null ? null : Pattern.compile(regex);
}
public Floc toAlfrescoModel() {
Floc floc = new Floc();
floc.setShardMethod(this.method);
floc.setHasContent(this.hasContent);
floc.setTemplate(this.template);
floc.setStoreRefs(new HashSet<>(this.storeRefs));
floc.getPropertyBag().put("coreName", this.core);
switch (this.method) {
case DB_ID_RANGE:
floc.getPropertyBag().put("shard.range", this.range.getLeft() + "-" + this.range.getRight());
break;
case DATE:
if (this.dateGrouping != null)
floc.getPropertyBag().put("shard.date.grouping", this.dateGrouping.toString());
case PROPERTY:
case EXPLICIT_ID:
floc.getPropertyBag().put("shard.key", this.prefixedProperty);
if (this.regex != null)
floc.getPropertyBag().put("shard.regex", this.regex.pattern());
default:
if (this.shards != null)
floc.setNumberOfShards(this.shards.intValue());
}
return floc;
}
private ShardSet(String coreName, String shardSetSpec) {
String[] parts = shardSetSpec.split("[;|_]");
if (parts.length == 0)
throw new IllegalArgumentException();
this.core = coreName;
this.method = ShardMethodEnum.valueOf(parts[0].toUpperCase());
boolean hasContent = false;
String template = DEFAULT_SOLR_TEMPLATE;
StoreRef storeRef = StoreRef.STORE_REF_WORKSPACE_SPACESSTORE;
Pair<Long, Long> range = null;
Short shards = null;
Byte dateGrouping = null;
String prefixedProperty = null;
String regex = null;
for (int i = 1; i < parts.length; i++) {
int colon = parts[i].indexOf(":");
String fieldName = colon < 0 ? parts[i] : parts[i].substring(0, colon);
String fieldValue = colon < 0 ? null : parts[i].substring(colon+1);
switch (fieldName) {
case "txt":
case "text":
case "fulltext":
case "content":
case "hasContent":
case "hasText":
hasContent = true;
break;
case "t":
case "template":
template = fieldValue;
break;
case "sref":
case "storeRef":
storeRef = new StoreRef(fieldValue);
break;
case "s":
case "shards":
shards = SimpleCaster.transform(fieldValue, Short.class);
break;
case "range":
range = this.strToRange(fieldValue);
break;
case "date.grouping":
dateGrouping = SimpleCaster.transform(fieldValue, Byte.class);
break;
case "k":
case "key":
prefixedProperty = fieldValue;
break;
case "regex":
regex = fieldValue;
break;
default:
throw new IllegalArgumentException();
}
}
this.hasContent = hasContent;
this.template = template;
this.storeRefs = CollectionUtils.asSet(storeRef);
this.shards = shards;
this.range = range;
this.dateGrouping = dateGrouping;
this.prefixedProperty = prefixedProperty;
this.regex = regex == null ? null : Pattern.compile(regex);
}
private Pair<Long, Long> strToRange(String str) {
str = StringUtils.trimToNull(str);
if (str == null)
return null;
String[] rangeValues = str.split("-");
return Pair.of(Long.valueOf(rangeValues[0]), Long.valueOf(rangeValues[1]));
}
public String getCore() {
return core;
}
public ShardMethodEnum getMethod() {
@@ -61,35 +199,70 @@ public class ShardSet implements Serializable {
return hasContent;
}
public String getTemplate() {
return template;
}
public Set<StoreRef> getStoreRefs() {
return storeRefs;
}
public Short getShards() {
return shards;
}
public Pair<Long, Long> getRange() {
return range;
}
public Byte getDateGrouping() {
return dateGrouping;
}
public String getPrefixedProperty() {
return prefixedProperty;
}
public Pattern getRegex() {
return regex;
}
public String toSpec() {
StringBuilder spec = new StringBuilder(this.method.toString());
if (this.hasContent)
spec.append(";fulltext");
for (Entry<String, String> c : this.config.entrySet()) {
if (!c.getKey().startsWith("shard."))
continue;
spec.append(';').append(c.getKey().substring(6)).append(':').append(c.getValue());
}
return spec.toString();
}
public Map<String, String> getConfig() {
return config;
}
public boolean isFor(ShardState shardState) {
return this.method.equals(shardState.getShardInstance().getShard().getFloc().getShardMethod()) &&
this.hasContent == shardState.getShardInstance().getShard().getFloc().hasContent() &&
this.isConfigurationFor(shardState.getPropertyBag());
}
public boolean isConfigurationFor(Map<String, String> propertyBag) {
for (Entry<String, String> config : this.config.entrySet()) {
if (config.getValue() == null || !config.getValue().equals(propertyBag.get(config.getKey())))
return false;
if (this.spec == null) {
StringBuilder spec = new StringBuilder(this.method.toString());
if (this.hasContent)
spec.append(";txt");
spec.append(";t:").append(this.template);
spec.append(";sref:").append(StringUtils.join(this.storeRefs, ','));
if (this.shards != null)
spec.append(";s:").append(this.shards);
if (this.range != null)
spec.append(";range:").append(this.range.getLeft()).append('-').append(this.range.getRight());
if (this.dateGrouping != null)
spec.append(";date.grouping:").append(this.dateGrouping);
if (this.prefixedProperty != null)
spec.append(";k:").append(this.prefixedProperty);
if (this.regex != null)
spec.append(";regex:").append(this.regex);
this.spec = spec.toString();
}
return true;
return spec;
}
public boolean contains(ShardState shardState) {
Floc floc = shardState.getShardInstance().getShard().getFloc();
CompositeMap<String, String> propbag = new CompositeMap<>(shardState.getPropertyBag(), floc.getPropertyBag());
return this.method.equals(floc.getShardMethod()) &&
this.hasContent == floc.hasContent() &&
StringUtils.equals(this.template, floc.getTemplate()) &&
this.storeRefs.equals(floc.getStoreRefs()) &&
this.equals(this.shards, floc.getNumberOfShards()) &&
this.equals(this.range, this.strToRange(propbag.get("shard.range"))) &&
this.equals(this.dateGrouping, propbag.get("shard.date.grouping")) &&
StringUtils.equals(this.prefixedProperty, propbag.get("shard.key")) &&
this.equals(this.regex, propbag.get("shard.regex"));
}
@Override
@@ -98,21 +271,34 @@ public class ShardSet implements Serializable {
return false;
ShardSet shardSet = (ShardSet) obj;
return this.method.equals(shardSet.method) && this.config.equals(shardSet.config);
return this.core.equals(shardSet.core);
}
private <T> boolean equals(Pattern p1, String s2) {
s2 = StringUtils.trimToNull(s2);
if (p1 == null) {
return s2 == null;
} else {
return p1.pattern().equals(s2);
}
}
private <T> boolean equals(T t1, T t2) {
if (t1 == null) {
return t2 == null;
} else {
return t1.equals(t2);
}
}
@Override
public int hashCode() {
if (this.hash == null) {
this.hash = new HashCodeBuilder().append(this.method).append(this.hasContent).append(this.config).build();
}
return this.hash;
return this.core.hashCode();
}
@Override
public String toString() {
return this.toSpec();
return this.core;
}
}

View File

@@ -0,0 +1,92 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.alfresco.repo.index.shard.ShardInstance;
public class SolrHost implements Serializable {

    private static final long serialVersionUID = -8834744746109388928L;

    // {hostname}:{port}[{path}]; group 3 (path) is whatever follows the port and
    // does not start with a digit, so "host:8983/solr" yields path "/solr"
    private static final Pattern PATTERN = Pattern.compile("([^:]+):([0-9]+)([^0-9]?.*)");

    public static SolrHost from(ShardInstance shardNode) {
        return new SolrHost(shardNode);
    }

    public static SolrHost from(String spec) {
        return new SolrHost(spec);
    }

    /** Format: {hostname}:{port}{path}. */
    private final String spec;
    private final String hostname;
    private final int port;
    private final String path;

    /**
     * Derives the host/port/path of the Solr node from an Alfresco shard instance.
     */
    protected SolrHost(ShardInstance shardNode) {
        this.hostname = shardNode.getHostName();
        this.port = shardNode.getPort();
        // baseUrl is to the shard; we want to the node, so exclude the core
        int lastSlash = shardNode.getBaseUrl().lastIndexOf('/');
        this.path = shardNode.getBaseUrl().substring(0, lastSlash);
        this.spec = this.hostname + ":" + this.port + this.path;
    }

    /**
     * Parses a {hostname}:{port}{path} spec.
     *
     * @throws IllegalArgumentException If the spec does not match the expected format.
     */
    protected SolrHost(String spec) {
        Matcher matcher = PATTERN.matcher(spec);
        // Matcher.group() throws IllegalStateException unless a match has been
        // attempted and found, so validate explicitly with a meaningful error
        if (!matcher.matches())
            throw new IllegalArgumentException("The Solr host spec '" + spec + "' is not in the {hostname}:{port}[{path}] format");
        this.spec = spec;
        this.hostname = matcher.group(1);
        this.port = Integer.parseInt(matcher.group(2));
        this.path = matcher.group(3);
    }

    public String getSpec() {
        return this.spec;
    }

    public String getHostname() {
        return this.hostname;
    }

    public int getPort() {
        return this.port;
    }

    public String getPath() {
        return this.path;
    }

    /**
     * @param protocol The URL scheme to use (e.g. "http" or "https").
     * @return A URL addressing this Solr node.
     * @throws IllegalArgumentException If the protocol or spec produces a malformed URL.
     */
    public URL toUrl(String protocol) {
        try {
            return new URL(protocol + "://" + this.hostname + ':' + this.port + this.path);
        } catch (MalformedURLException mue) {
            throw new IllegalArgumentException(mue.getMessage());
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SolrHost))
            return false;
        SolrHost shardNode = (SolrHost) obj;
        return this.spec.equals(shardNode.spec);
    }

    @Override
    public int hashCode() {
        return this.spec.hashCode();
    }

    @Override
    public String toString() {
        return this.spec;
    }

}

View File

@@ -1,9 +1,8 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.Set;
import java.util.Map;
import org.alfresco.repo.index.shard.ShardState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -12,7 +11,10 @@ import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
import com.inteligr8.alfresco.asie.service.ShardDiscoveryService;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
public abstract class AbstractAsieNodeWebScript extends AbstractAsieShardableWebScript {
@@ -36,16 +38,17 @@ public abstract class AbstractAsieNodeWebScript extends AbstractAsieShardableWeb
protected void execute(WebScriptRequest req, WebScriptResponse res, String nodeHostname, int nodePort) throws IOException {
this.logger.trace("execute({}, {})", nodeHostname, nodePort);
Set<ShardState> shardsOnNode = this.sds.findByNode(nodeHostname, nodePort);
SolrHost node = this.sds.findNode(nodeHostname, nodePort);
Map<ShardSet, Map<Integer, ShardInstanceState>> shardsOnNode = this.sds.findByNode(node);
if (shardsOnNode == null || shardsOnNode.isEmpty())
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE node could not be found");
this.execute(req, res, shardsOnNode);
this.execute(req, res, node, shardsOnNode);
}
protected void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> registeredNodeShards) throws IOException {
this.logger.trace("execute({})", registeredNodeShards.size());
protected void execute(WebScriptRequest req, WebScriptResponse res, SolrHost node, Map<ShardSet, Map<Integer, ShardInstanceState>> shardSetShardStates) throws IOException {
this.logger.trace("execute({})", shardSetShardStates.size());
// made to be optionally overridden
}

View File

@@ -1,9 +1,9 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.Set;
import java.util.List;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -13,7 +13,9 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.service.ShardDiscoveryService;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
public abstract class AbstractAsieShardWebScript extends AbstractAsieShardableWebScript {
@@ -26,21 +28,24 @@ public abstract class AbstractAsieShardWebScript extends AbstractAsieShardableWe
public void executeAuthorized(WebScriptRequest req, WebScriptResponse res) throws IOException {
this.logger.trace("execute()");
ShardSet shardSet = this.getRequiredPathParameter(req, "shardSet", ShardSet.class);
this.logger.debug("Parsed shard set: {}", shardSet);
String coreName = this.getRequiredPathParameter(req, "shardCore");
int shardId = this.getRequiredPathParameter(req, "shardId", Integer.class);
try {
Set<ShardState> registeredShardNodes = this.sds.findByShard(shardSet, shardId);
if (registeredShardNodes == null || registeredShardNodes.isEmpty())
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard set or shard could not be found");
ShardSet shardSet = this.sds.findSetByCore(coreName);
if (shardSet == null)
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard set could not be found");
this.execute(req, res, registeredShardNodes);
List<Pair<SolrHost, ShardInstanceState>> nodeShardStates = this.sds.findNodeStatesByShard(shardSet, shardId);
if (nodeShardStates == null || nodeShardStates.isEmpty())
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard could not be found");
this.execute(req, res, shardId, shardSet, nodeShardStates);
} catch (IllegalArgumentException iae) {
throw new WebScriptException(HttpStatus.BAD_REQUEST.value(), iae.getMessage());
}
}
protected abstract void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> registeredShardNodes) throws IOException;
protected abstract void execute(WebScriptRequest req, WebScriptResponse res, int shardId, ShardSet shardSet, List<Pair<SolrHost, ShardInstanceState>> nodeShardStates) throws IOException;
}

View File

@@ -57,7 +57,7 @@ public abstract class AbstractAsieShardableWebScript extends AbstractAsieWebScri
try {
SolrShardHashSampleType type = SolrShardHashSampleType.valueOf(matcher.group(1));
int shards = Integer.parseInt(matcher.group(2));
short shards = Short.parseShort(matcher.group(2));
return this.createSampleHashTable(type, shards);
} catch (NumberFormatException nfe) {
// this should never happen, because of the regex
@@ -67,7 +67,7 @@ public abstract class AbstractAsieShardableWebScript extends AbstractAsieWebScri
}
}
protected SolrShardHashTable<?> createSampleHashTable(SolrShardHashSampleType sampleType, int shards) {
protected SolrShardHashTable<?> createSampleHashTable(SolrShardHashSampleType sampleType, short shards) {
int thisYear = Year.now().getValue();
switch (sampleType) {

View File

@@ -1,13 +1,10 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
@@ -23,7 +20,7 @@ import com.inteligr8.rs.ClientCxfImpl;
import jakarta.ws.rs.client.ClientRequestContext;
public abstract class AbstractAsieWebScript extends AbstractWebScript implements InitializingBean {
public abstract class AbstractAsieWebScript extends AbstractWebScript {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@@ -42,9 +39,6 @@ public abstract class AbstractAsieWebScript extends AbstractWebScript implements
@Value("${solr.sharedSecret}")
private String solrSharedSecret;
@Value("${inteligr8.asie.allowedAuthorities}")
private String authorizedAuthoritiesStr;
@Value("${inteligr8.asie.basePath}")
private String solrBaseUrl;
@@ -52,28 +46,11 @@ public abstract class AbstractAsieWebScript extends AbstractWebScript implements
@Qualifier(Constants.QUALIFIER_ASIE)
private ObjectMapper objectMapper;
private Set<String> authorizedAuthorities;
@Override
public void afterPropertiesSet() throws Exception {
this.authorizedAuthorities = new HashSet<>();
String[] authorities = this.authorizedAuthoritiesStr.split(",");
for (String authority : authorities) {
authority = StringUtils.trimToNull(authority);
if (authority != null)
this.authorizedAuthorities.add(authority);
}
if (this.authorizedAuthorities.isEmpty())
this.logger.warn("All authenticated users will be authorized to access ASIE web scripts");
super.afterPropertiesSet();
this.solrSharedSecret = StringUtils.trimToNull(this.solrSharedSecret);
}
@Override
protected Set<String> getAuthorities() {
return this.authorizedAuthorities;
}
protected ObjectMapper getObjectMapper() {
return this.objectMapper;

View File

@@ -4,11 +4,19 @@ import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.security.AuthorityService;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.extensions.webscripts.Description.RequiredAuthentication;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
@@ -16,9 +24,38 @@ import org.springframework.http.HttpStatus;
import net.sf.acegisecurity.GrantedAuthority;
public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript {
public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript implements InitializingBean {
protected abstract Set<String> getAuthorities();
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Value("${inteligr8.asie.allowedAuthorities}")
private String authorizedAuthoritiesStr;
@Autowired
private AuthorityService authorityService;
private Set<String> authorizedAuthorities;
@Override
public void afterPropertiesSet() throws Exception {
this.authorizedAuthorities = new HashSet<>();
String[] authorities = this.authorizedAuthoritiesStr.split(",");
for (String authority : authorities) {
authority = StringUtils.trimToNull(authority);
if (authority != null)
this.authorizedAuthorities.add(authority);
}
if (this.authorizedAuthorities.isEmpty()) {
this.logger.warn("All authenticated users will be authorized to access web scripts");
} else {
this.logger.debug("Allowing only authorities: {}", this.authorizedAuthorities);
}
}
protected Set<String> getAuthorities() {
return this.authorizedAuthorities;
}
@Override
public final void execute(WebScriptRequest request, WebScriptResponse response) throws IOException {
@@ -38,6 +75,13 @@ public abstract class AbstractWebScript extends org.springframework.extensions.w
return true;
}
Set<String> authorities = this.authorityService.getAuthoritiesForUser(AuthenticationUtil.getFullyAuthenticatedUser());
if (authorities != null) {
if (!Collections.disjoint(this.getAuthorities(), authorities))
return true;
}
this.logger.trace("Not authorized: user '{}'; authorities: {} + {}", AuthenticationUtil.getFullyAuthenticatedUser(), AuthenticationUtil.getFullAuthentication().getAuthorities(), authorities);
return false;
}

View File

@@ -3,7 +3,6 @@ package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
@@ -20,13 +19,13 @@ public class ClearRegistryWebScript extends AbstractWebScript {
@Autowired
private ShardStateService sss;
@Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException {
@Override
public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
this.sss.clear();
this.sbs.forget();
res.setStatus(HttpStatus.OK.value());
response.setStatus(HttpStatus.OK.value());
}
}

View File

@@ -1,19 +1,21 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.Set;
import java.util.Collections;
import java.util.List;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.service.ShardBackupService;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
@Component(value = "webscript.com.inteligr8.alfresco.asie.backupNode.get")
public class GetBackupNodeWebScript extends AbstractAsieShardWebScript {
@@ -22,15 +24,15 @@ public class GetBackupNodeWebScript extends AbstractAsieShardWebScript {
private ShardBackupService sbs;
@Override
public void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> shardNodes) throws IOException {
if (shardNodes.isEmpty())
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard state could not be found");
Node node = this.sbs.fetchNode(shardNodes);
public void execute(WebScriptRequest req, WebScriptResponse res, int shardId, ShardSet shardSet, List<Pair<SolrHost, ShardInstanceState>> nodeShardStates) throws IOException {
Collections.sort(nodeShardStates, new ShardDiscoveryService.ShardedNodeShardStateComparator());
SolrHost mostRecentNode = nodeShardStates.iterator().next().getFirst();
SolrHost backupNode = this.sbs.selectNode(shardSet, shardId, mostRecentNode);
res.setContentType(MediaType.APPLICATION_JSON_VALUE);
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), node.getId());
this.getObjectMapper().writeValue(res.getWriter(), backupNode.getSpec());
}
}

View File

@@ -1,10 +1,9 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.Set;
import java.util.List;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
@@ -13,7 +12,9 @@ import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
@Component(value = "webscript.com.inteligr8.alfresco.asie.leadNode.get")
@@ -23,17 +24,14 @@ public class GetLeadNodeWebScript extends AbstractAsieShardWebScript {
private ShardDiscoveryService sds;
@Override
public void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> shardNodesCache) throws IOException {
if (shardNodesCache.isEmpty())
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard state could not be found");
ShardInstance latestNode = this.sds.computeLeadShard(shardNodesCache);
public void execute(WebScriptRequest req, WebScriptResponse res, int shardId, ShardSet shardSet, List<Pair<SolrHost, ShardInstanceState>> nodeShardStates) throws IOException {
SolrHost latestNode = this.sds.computeLeadNode(nodeShardStates);
if (latestNode == null)
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE shard state could not be found");
throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The ASIE node could not be found");
res.setContentType(MediaType.APPLICATION_JSON_VALUE);
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), new Node(latestNode).getId());
this.getObjectMapper().writeValue(res.getWriter(), latestNode.getSpec());
}
}

View File

@@ -1,18 +1,17 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Set;
import java.util.Map;
import java.util.Map.Entry;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardState;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.compute.SolrShardHashTable;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.rest.model.NodeInfo;
import com.inteligr8.alfresco.asie.rest.model.NodeShardInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardInfo;
@@ -22,32 +21,32 @@ import com.inteligr8.alfresco.asie.rest.model.ShardSetInfo;
public class GetNodeWebScript extends AbstractAsieNodeWebScript {
@Override
protected void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> registeredNodeShards) throws IOException {
ShardState anyRegisteredNodeShard = registeredNodeShards.iterator().next();
ShardInstance registeredNode = anyRegisteredNodeShard.getShardInstance();
int maxShards = registeredNode.getShard().getFloc().getNumberOfShards();
protected void execute(WebScriptRequest req, WebScriptResponse res, SolrHost node, Map<ShardSet, Map<Integer, ShardInstanceState>> shardSetShardStates) throws IOException {
SolrShardHashSampleType sampleHashType = this.getOptionalQueryParameter(req, "sampleHashType", SolrShardHashSampleType.class);
SolrShardHashTable<?> sampleHashTable = sampleHashType == null ? null : this.createSampleHashTable(sampleHashType, maxShards);
NodeInfo node = new NodeShardInfo(registeredNode);
for (ShardState registeredNodeShard : registeredNodeShards) {
ShardInfo shard = new ShardInfo();
shard.setId(registeredNodeShard.getShardInstance().getShard().getInstance());
shard.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(registeredNodeShard.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
shard.setTxsCompleted(registeredNodeShard.getLastIndexedTxId());
shard.setShardSet(new ShardSetInfo(registeredNodeShard.getShardInstance().getShard().getFloc(), registeredNodeShard));
if (sampleHashTable != null)
this.addShardHashSamples(shard, sampleHashTable);
node.getShards().put(shard.getId(), shard);
}
NodeInfo nodeResponse = NodeShardInfo.from(node);
for (Entry<ShardSet, Map<Integer, ShardInstanceState>> shardSet : shardSetShardStates.entrySet()) {
Short maxShards = shardSet.getKey().getShards();
SolrShardHashTable<?> sampleHashTable = null;
if (sampleHashType != null && maxShards != null)
sampleHashTable = this.createSampleHashTable(sampleHashType, maxShards);
ShardSetInfo shardSetResponse = ShardSetInfo.from(shardSet.getKey());
for (Entry<Integer, ShardInstanceState> shard : shardSet.getValue().entrySet()) {
ShardInfo shardResponse = ShardInfo.from(shard.getKey(), shard.getValue());
shardResponse.setShardSet(shardSetResponse);
if (sampleHashTable != null)
this.addShardHashSamples(shardResponse, sampleHashTable);
nodeResponse.getShards().put(shardResponse.getId(), shardResponse);
}
}
res.setContentType("application/json");
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), node);
this.getObjectMapper().writeValue(res.getWriter(), nodeResponse);
}
}

View File

@@ -8,6 +8,7 @@ import java.util.TreeMap;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -18,7 +19,9 @@ import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.compute.SolrShardHashTable;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.rest.model.NodeInfo;
import com.inteligr8.alfresco.asie.rest.model.NodeShardInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardInfo;
@@ -39,34 +42,44 @@ public class GetNodesWebScript extends AbstractAsieShardableWebScript {
SolrShardHashSampleType sampleHashType = this.getOptionalQueryParameter(req, "sampleHashType", SolrShardHashSampleType.class);
Map<String, NodeInfo> nodes = new TreeMap<>();
Map<String, NodeInfo> nodesResponse = new TreeMap<>();
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
int maxShards = floc.getKey().getNumberOfShards();
Short maxShards = floc.getKey().getShardMethod().equals(ShardMethodEnum.DB_ID_RANGE) ? null : (short) floc.getKey().getNumberOfShards();
SolrShardHashTable<?> sampleHashTable = null;
if (sampleHashType != null && maxShards != null)
sampleHashTable = this.createSampleHashTable(sampleHashType, maxShards);
SolrShardHashTable<?> sampleHashTable = sampleHashType == null ? null : this.createSampleHashTable(sampleHashType, maxShards);
ShardSetInfo shardSetResponse = null;
for (Entry<Shard, Set<ShardState>> registeredShards : floc.getValue().entrySet()) {
for (ShardState registeredShardNode : registeredShards.getValue()) {
String nodeId = new Node(registeredShardNode.getShardInstance()).getId();
NodeInfo node = nodes.get(nodeId);
if (node == null) {
node = new NodeShardInfo(registeredShardNode.getShardInstance());
nodes.put(node.getId(), node);
if (shardSetResponse == null) {
ShardSet shardSet = ShardSet.from(floc.getKey(), registeredShardNode);
shardSetResponse = ShardSetInfo.from(shardSet);
}
ShardInfo shard = new ShardInfo(registeredShardNode);
shard.setShardSet(new ShardSetInfo(floc.getKey(), registeredShardNode));
SolrHost node = SolrHost.from(registeredShardNode.getShardInstance());
String nodeSpec = node.getSpec();
NodeInfo nodeResponse = nodesResponse.get(nodeSpec);
if (nodeResponse == null) {
nodeResponse = NodeShardInfo.from(node);
nodesResponse.put(nodeResponse.getId(), nodeResponse);
}
ShardInstanceState nodeShardState = ShardInstanceState.from(registeredShardNode);
ShardInfo shardResponse = ShardInfo.from(registeredShards.getKey().getInstance(), nodeShardState);
shardResponse.setShardSet(shardSetResponse);
if (sampleHashTable != null)
this.addShardHashSamples(shard, sampleHashTable);
node.getShards().put(shard.getId(), shard);
this.addShardHashSamples(shardResponse, sampleHashTable);
nodeResponse.getShards().put(shardResponse.getId(), shardResponse);
}
}
}
res.setContentType("application/json");
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), nodes);
this.getObjectMapper().writeValue(res.getWriter(), nodesResponse);
}
}

View File

@@ -1,11 +1,7 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -14,10 +10,7 @@ import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -30,11 +23,14 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.compute.SolrShardHashTable;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.rest.model.NodeInfo;
import com.inteligr8.alfresco.asie.rest.model.PropertyHashShardSetInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardNodeInfo;
import com.inteligr8.alfresco.asie.service.ShardDiscoveryService;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
@Component(value = "webscript.com.inteligr8.alfresco.asie.propertyHashShards.get")
public class GetPropertyHashShardsWebScript extends AbstractAsieShardableWebScript {
@@ -55,19 +51,18 @@ public class GetPropertyHashShardsWebScript extends AbstractAsieShardableWebScri
List<String> values = this.getOptionalQueryParameterAsList(req);
this.validateParameters(min, max, values);
List<PropertyHashShardSetInfo> shardSets = new LinkedList<>();
Collection<Pair<Floc, Map<Shard, Set<ShardState>>>> flocs = this.sds.findByShardMethod(ShardMethodEnum.PROPERTY);
if (flocs.isEmpty())
List<PropertyHashShardSetInfo> shardSetsResponse = new LinkedList<>();
Set<ShardSet> shardSets = this.sds.findSetsByShardMethod(ShardMethodEnum.PROPERTY);
if (shardSets.isEmpty())
throw new WebScriptException(HttpStatus.NO_CONTENT.value(), "There are no property-based shards");
for (Pair<Floc, Map<Shard, Set<ShardState>>> floc : flocs) {
ShardState anyShardNode = this.getAnyShardNode(floc.getSecond());
PropertyHashShardSetInfo shardSet = new PropertyHashShardSetInfo(floc.getFirst(), anyShardNode);
shardSet.setShards(new TreeMap<>());
for (ShardSet shardSet : shardSets) {
PropertyHashShardSetInfo shardSetResponse = PropertyHashShardSetInfo.from(shardSet);
shardSetResponse.setShards(new TreeMap<>());
int maxShards = floc.getFirst().getNumberOfShards();
SolrShardHashTable<?> sampleHashTable = this.createSampleHashTable(sampleHashType, maxShards);
Short shardCount = shardSet.getShards();
SolrShardHashTable<?> sampleHashTable = this.createSampleHashTable(sampleHashType, shardCount);
Map<Integer, List<Object>> shardToHashMap = new HashMap<>();
@@ -83,39 +78,24 @@ public class GetPropertyHashShardsWebScript extends AbstractAsieShardableWebScri
}
}
for (Entry<Shard, Set<ShardState>> shardCache : floc.getSecond().entrySet()) {
ShardInfo shard = new ShardInfo();
shard.setId(shardCache.getKey().getInstance());
shard.setNodes(new HashMap<>());
for (ShardState shardNodeCache : shardCache.getValue()) {
if (shard.getTxsCompleted() == null || shard.getTxsCompleted().longValue() < shardNodeCache.getLastIndexedTxId()) {
shard.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(shardNodeCache.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
shard.setTxsCompleted(shardNodeCache.getLastIndexedTxId());
}
for (Entry<Integer, Pair<SolrHost, ShardInstanceState>> shard : this.sds.findLatestNodeStates(shardSet).entrySet()) {
ShardInfo shardResponse = ShardInfo.from(shard.getKey(), shard.getValue().getSecond());
shardResponse.setNodes(new HashMap<>());
NodeInfo node = new ShardNodeInfo(shardNodeCache);
shard.getNodes().put(node.getId(), node);
}
NodeInfo nodeResponse = ShardNodeInfo.from(shard.getValue().getFirst(), shard.getValue().getSecond());
shardResponse.getNodes().put(nodeResponse.getId(), nodeResponse);
List<Object> hashedValues = shardToHashMap.get(shard.getId());
List<Object> hashedValues = shardToHashMap.get(shardResponse.getId());
if (hashedValues != null) for (Object hashedValue : hashedValues)
shardSet.getShards().put(hashedValue, shard);
shardSetResponse.getShards().put(hashedValue, shardResponse);
}
shardSets.add(shardSet);
shardSetsResponse.add(shardSetResponse);
}
res.setContentType(MediaType.APPLICATION_JSON_VALUE);
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), shardSets);
}
private ShardState getAnyShardNode(Map<Shard, Set<ShardState>> shards) {
for (Set<ShardState> shardNodes : shards.values())
for (ShardState shardNode : shardNodes)
return shardNode;
return null;
this.getObjectMapper().writeValue(res.getWriter(), shardSetsResponse);
}
private List<String> getOptionalQueryParameterAsList(WebScriptRequest req) {

View File

@@ -1,20 +1,19 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Set;
import java.util.List;
import java.util.TreeMap;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.compute.SolrShardHashTable;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.rest.model.NodeInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardNodeInfo;
@@ -24,34 +23,35 @@ import com.inteligr8.alfresco.asie.rest.model.ShardSetInfo;
public class GetShardWebScript extends AbstractAsieShardWebScript {
@Override
public void execute(WebScriptRequest req, WebScriptResponse res, Set<ShardState> registeredShardNodes) throws IOException {
ShardState aRegisteredShardNode = registeredShardNodes.iterator().next();
Shard registeredShard = aRegisteredShardNode.getShardInstance().getShard();
int maxShards = registeredShard.getFloc().getNumberOfShards();
public void execute(WebScriptRequest req, WebScriptResponse res, int shardId, ShardSet shardSet, List<Pair<SolrHost, ShardInstanceState>> nodeShardStates) throws IOException {
SolrShardHashSampleType sampleHashType = this.getOptionalQueryParameter(req, "sampleHashType", SolrShardHashSampleType.class);
SolrShardHashTable<?> sampleHashTable = sampleHashType == null ? null : this.createSampleHashTable(sampleHashType, maxShards);
ShardInfo shard = new ShardInfo();
shard.setId(registeredShard.getInstance());
shard.setShardSet(new ShardSetInfo(registeredShard.getFloc(), aRegisteredShardNode));
shard.setNodes(new TreeMap<>());
Short maxShards = shardSet.getShards();
SolrShardHashTable<?> sampleHashTable = null;
if (sampleHashType != null && maxShards != null)
sampleHashTable = this.createSampleHashTable(sampleHashType, maxShards);
ShardSetInfo shardSetResponse = ShardSetInfo.from(shardSet);
ShardInfo shardResponse = ShardInfo.from(shardId);
shardResponse.setShardSet(shardSetResponse);
shardResponse.setNodes(new TreeMap<>());
if (sampleHashTable != null)
this.addShardHashSamples(shard, sampleHashTable);
this.addShardHashSamples(shardResponse, sampleHashTable);
for (ShardState registeredShardNode : registeredShardNodes) {
if (shard.getTxsCompleted() == null || shard.getTxsCompleted().longValue() < registeredShardNode.getLastIndexedTxId()) {
shard.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(registeredShardNode.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
shard.setTxsCompleted(registeredShardNode.getLastIndexedTxId());
for (Pair<SolrHost, ShardInstanceState> nodeShardState : nodeShardStates) {
if (shardResponse.getTxsCompleted() == null || shardResponse.getTxsCompleted().longValue() < nodeShardState.getSecond().getLastIndexedTxId()) {
shardResponse.setLatestTx(nodeShardState.getSecond().getLastIndexedTxTime());
shardResponse.setTxsCompleted(nodeShardState.getSecond().getLastIndexedTxId());
}
NodeInfo node = new ShardNodeInfo(registeredShardNode);
shard.getNodes().put(node.getId(), node);
NodeInfo node = ShardNodeInfo.from(nodeShardState.getFirst(), nodeShardState.getSecond());
shardResponse.getNodes().put(node.getId(), node);
}
res.setContentType(MediaType.APPLICATION_JSON_VALUE);
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), shard);
this.getObjectMapper().writeValue(res.getWriter(), shardResponse);
}
}

View File

@@ -22,6 +22,9 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.compute.SolrShardHashTable;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.rest.model.NodeInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardInfo;
import com.inteligr8.alfresco.asie.rest.model.ShardNodeInfo;
@@ -42,42 +45,46 @@ public class GetShardsWebScript extends AbstractAsieShardableWebScript {
SolrShardHashSampleType sampleHashType = this.getOptionalQueryParameter(req, "sampleHashType", SolrShardHashSampleType.class);
Map<String, ShardSetInfo> shardSets = new TreeMap<>();
Map<String, ShardSetInfo> shardSetsResponse = new TreeMap<>();
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
int maxShards = floc.getKey().getNumberOfShards();
ShardState anyShardNode = this.getAnyShardNode(floc.getValue());
ShardSetInfo shardSet = new ShardSetInfo(floc.getKey(), anyShardNode);
shardSet.setShards(new TreeMap<>());
SolrShardHashTable<?> sampleHashTable = sampleHashType == null ? null : this.createSampleHashTable(sampleHashType, maxShards);
ShardSet shardSet = ShardSet.from(floc.getKey(), anyShardNode);
ShardSetInfo shardSetResponse = ShardSetInfo.from(shardSet);
shardSetResponse.setShards(new TreeMap<>());
Short maxShards = shardSet.getShards();
SolrShardHashTable<?> sampleHashTable = null;
if (sampleHashType != null && maxShards != null)
sampleHashTable = this.createSampleHashTable(sampleHashType, maxShards);
for (Entry<Shard, Set<ShardState>> registeredShard : floc.getValue().entrySet()) {
ShardInfo shard = new ShardInfo();
shard.setId(registeredShard.getKey().getInstance());
shard.setNodes(new TreeMap<>());
ShardInfo shardResponse = ShardInfo.from(registeredShard.getKey().getInstance());
shardResponse.setNodes(new TreeMap<>());
for (ShardState registeredShardNode : registeredShard.getValue()) {
if (shard.getTxsCompleted() == null || shard.getTxsCompleted().longValue() < registeredShardNode.getLastIndexedTxId()) {
shard.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(registeredShardNode.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
shard.setTxsCompleted(registeredShardNode.getLastIndexedTxId());
if (shardResponse.getTxsCompleted() == null || shardResponse.getTxsCompleted().longValue() < registeredShardNode.getLastIndexedTxId()) {
shardResponse.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(registeredShardNode.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
shardResponse.setTxsCompleted(registeredShardNode.getLastIndexedTxId());
}
NodeInfo node = new ShardNodeInfo(registeredShardNode);
shard.getNodes().put(node.getId(), node);
SolrHost node = SolrHost.from(registeredShardNode.getShardInstance());
ShardInstanceState nodeShardState = ShardInstanceState.from(registeredShardNode);
NodeInfo nodeResponse = ShardNodeInfo.from(node, nodeShardState);
shardResponse.getNodes().put(nodeResponse.getId(), nodeResponse);
}
if (sampleHashTable != null)
this.addShardHashSamples(shardSet, shard, sampleHashTable);
shardSet.getShards().put(shard.getId(), shard);
this.addShardHashSamples(shardSetResponse, shardResponse, sampleHashTable);
shardSetResponse.getShards().put(shardResponse.getId(), shardResponse);
}
shardSets.put(shardSet.getMethodSpec(), shardSet);
shardSetsResponse.put(shardSetResponse.getSpec(), shardSetResponse);
}
res.setContentType(MediaType.APPLICATION_JSON_VALUE);
res.setContentEncoding("utf-8");
this.getObjectMapper().writeValue(res.getWriter(), shardSets);
this.getObjectMapper().writeValue(res.getWriter(), shardSetsResponse);
}
private ShardState getAnyShardNode(Map<Shard, Set<ShardState>> shards) {

View File

@@ -3,12 +3,10 @@ package com.inteligr8.alfresco.asie.rest.model;
import java.util.Map;
import java.util.TreeMap;
import org.alfresco.repo.index.shard.ShardInstance;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.SolrHost;
public abstract class NodeInfo {
public abstract class NodeInfo implements ResponseInfo {
@JsonProperty
private String id;
@@ -19,8 +17,8 @@ public abstract class NodeInfo {
public NodeInfo() {
}
public NodeInfo(ShardInstance nodeCache) {
this.setId(new Node(nodeCache).getId());
protected NodeInfo(SolrHost node) {
this.id = node.getSpec();
}
public String getId() {

View File

@@ -1,4 +1,4 @@
package com.inteligr8.alfresco.asie.model;
package com.inteligr8.alfresco.asie.rest.model;
import java.net.InetAddress;
import java.net.UnknownHostException;

View File

@@ -2,25 +2,28 @@ package com.inteligr8.alfresco.asie.rest.model;
import java.util.Map;
import org.alfresco.repo.index.shard.ShardInstance;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.SolrHost;
@JsonInclude(Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class NodeShardInfo extends NodeInfo {
public static NodeShardInfo from(SolrHost node) {
return new NodeShardInfo(node);
}
@JsonProperty
private Map<Integer, ShardInfo> shards;
public NodeShardInfo() {
}
public NodeShardInfo(ShardInstance nodeCache) {
super(nodeCache);
protected NodeShardInfo(SolrHost node) {
super(node);
}
}

View File

@@ -1,4 +1,6 @@
package com.inteligr8.alfresco.asie.model;
package com.inteligr8.alfresco.asie.rest.model;
import com.inteligr8.alfresco.asie.model.ShardSet;
public class NodeShardParameterSet extends NodeParameterSet {

View File

@@ -2,24 +2,25 @@ package com.inteligr8.alfresco.asie.rest.model;
import java.util.Map;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.ShardState;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.ShardSet;
@JsonInclude(Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class PropertyHashShardSetInfo {
public class PropertyHashShardSetInfo implements ResponseInfo {
public static PropertyHashShardSetInfo from(ShardSet shardSet) {
return new PropertyHashShardSetInfo(shardSet);
}
@JsonProperty
private String methodSpec;
@JsonProperty
private int shardCount;
private String core;
@JsonProperty
private String spec;
@JsonProperty
private Map<Object, ShardInfo> shards;
@@ -27,26 +28,25 @@ public class PropertyHashShardSetInfo {
public PropertyHashShardSetInfo() {
}
public PropertyHashShardSetInfo(Floc floc, ShardState anyShardNode) {
ShardSet shardSet = new ShardSet(floc, anyShardNode);
this.setMethodSpec(shardSet.toSpec());
this.setShardCount(floc.getNumberOfShards());
protected PropertyHashShardSetInfo(ShardSet shardSet) {
this.core = shardSet.getCore();
this.spec = shardSet.toSpec();
}
public String getCore() {
return core;
}
public void setCore(String core) {
this.core = core;
}
public String getMethodSpec() {
return this.methodSpec;
public String getSpec() {
return this.spec;
}
public void setMethodSpec(String methodSpec) {
this.methodSpec = methodSpec;
}
public int getShardCount() {
return shardCount;
}
public void setShardCount(int shardCount) {
this.shardCount = shardCount;
public void setSpec(String spec) {
this.spec = spec;
}
public Map<Object, ShardInfo> getShards() {

View File

@@ -0,0 +1,5 @@
package com.inteligr8.alfresco.asie.rest.model;
public interface RequestParameterSet {
}

View File

@@ -0,0 +1,5 @@
package com.inteligr8.alfresco.asie.rest.model;
public interface ResponseInfo {
}

View File

@@ -1,22 +1,27 @@
package com.inteligr8.alfresco.asie.rest.model;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Map;
import org.alfresco.repo.index.shard.ShardState;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonFormat.Shape;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonFormat.Shape;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
@JsonInclude(Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class ShardInfo {
public class ShardInfo implements ResponseInfo {
public static ShardInfo from(int shardId) {
return new ShardInfo(shardId, null);
}
public static ShardInfo from(int shardId, ShardInstanceState nodeShardState) {
return new ShardInfo(shardId, nodeShardState);
}
@JsonProperty
private int id;
@@ -40,10 +45,10 @@ public class ShardInfo {
public ShardInfo() {
}
public ShardInfo(ShardState shard) {
this.setId(shard.getShardInstance().getShard().getInstance());
this.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(shard.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
this.setTxsCompleted(shard.getLastIndexedTxId());
protected ShardInfo(int shardId, ShardInstanceState nodeShardState) {
this.id = shardId;
this.latestTx = nodeShardState == null ? null : nodeShardState.getLastIndexedTxTime();
this.txsCompleted = nodeShardState == null ? null : nodeShardState.getLastIndexedTxId();
}
public int getId() {

View File

@@ -1,10 +1,6 @@
package com.inteligr8.alfresco.asie.rest.model;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import org.alfresco.repo.index.shard.ShardState;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonFormat.Shape;
@@ -12,11 +8,17 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
@JsonInclude(Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class ShardNodeInfo extends NodeInfo {
public static ShardNodeInfo from(SolrHost node, ShardInstanceState nodeShardState) {
return new ShardNodeInfo(node, nodeShardState);
}
@JsonProperty
private Long txsCompleted;
@@ -24,13 +26,16 @@ public class ShardNodeInfo extends NodeInfo {
@JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ssXXX")
private OffsetDateTime latestTx;
/**
* For Spring deserialization
*/
public ShardNodeInfo() {
}
public ShardNodeInfo(ShardState shard) {
super(shard.getShardInstance());
this.setLatestTx(OffsetDateTime.ofInstant(Instant.ofEpochMilli(shard.getLastIndexedTxCommitTime()), ZoneOffset.UTC));
this.setTxsCompleted(shard.getLastIndexedTxId());
protected ShardNodeInfo(SolrHost node, ShardInstanceState nodeShardState) {
super(node);
this.latestTx = nodeShardState.getLastIndexedTxTime();
this.txsCompleted = nodeShardState.getLastIndexedTxId();
}
public Long getTxsCompleted() {

View File

@@ -1,4 +1,6 @@
package com.inteligr8.alfresco.asie.model;
package com.inteligr8.alfresco.asie.rest.model;
import com.inteligr8.alfresco.asie.model.ShardSet;
public class ShardParameterSet implements RequestParameterSet {

View File

@@ -4,24 +4,40 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.service.cmr.repository.StoreRef;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.alfresco.asie.model.ShardSet;
@JsonInclude(Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class ShardSetInfo {
public class ShardSetInfo implements ResponseInfo {
public static ShardSetInfo from(ShardSet shardSet) {
return new ShardSetInfo(shardSet);
}
@JsonProperty
private String methodSpec;
private String spec;
@JsonProperty
private ShardMethodEnum method;
@JsonProperty
private boolean fulltextEnabled;
@JsonProperty
private String template;
@JsonProperty
private int shardCount;
private Set<StoreRef> storeRefs;
@JsonProperty
private Short shardCount;
@JsonProperty
private Map<Integer, ShardInfo> shards;
@@ -32,25 +48,60 @@ public class ShardSetInfo {
public ShardSetInfo() {
}
public ShardSetInfo(Floc floc, ShardState anyShardNode) {
ShardSet shardSet = new ShardSet(floc, anyShardNode);
this.methodSpec = shardSet.toSpec();
this.setShardCount(floc.getNumberOfShards());
protected ShardSetInfo(ShardSet shardSet) {
this.spec = shardSet.toSpec();
this.method = shardSet.getMethod();
this.fulltextEnabled = shardSet.hasContent();
this.template = shardSet.getTemplate();
this.storeRefs = shardSet.getStoreRefs();
this.shardCount = shardSet.getShards();
}
public String getMethodSpec() {
return this.methodSpec;
public String getSpec() {
return this.spec;
}
public void setMethodSpec(String methodSpec) {
this.methodSpec = methodSpec;
public void setSpec(String spec) {
this.spec = spec;
}
public int getShardCount() {
public ShardMethodEnum getMethod() {
return method;
}
public void setMethod(ShardMethodEnum method) {
this.method = method;
}
public boolean isFulltextEnabled() {
return fulltextEnabled;
}
public void setFulltextEnabled(boolean fulltextEnabled) {
this.fulltextEnabled = fulltextEnabled;
}
public String getTemplate() {
return template;
}
public void setTemplate(String template) {
this.template = template;
}
public Set<StoreRef> getStoreRefs() {
return storeRefs;
}
public void setStoreRefs(Set<StoreRef> storeRefs) {
this.storeRefs = storeRefs;
}
public Short getShardCount() {
return shardCount;
}
public void setShardCount(int shardCount) {
public void setShardCount(Short shardCount) {
this.shardCount = shardCount;
}

View File

@@ -1,11 +1,5 @@
package com.inteligr8.alfresco.asie.service;
import java.io.Serializable;
import java.util.Collection;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -15,17 +9,15 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;
import com.inteligr8.alfresco.asie.model.PersistedNode;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
@Component
public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.ShardBackupService {
private static final String ATTR_BACKUP_NODE = "backupNode";
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private ShardDiscoveryService sds;
@Autowired
@Qualifier(Constants.QUALIFIER_ASIE)
@@ -34,24 +26,17 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
@Value("${inteligr8.asie.backup.persistTimeMinutes}")
private int persistTimeMinutes;
public Node fetchNode(Collection<ShardState> shardNodes) {
if (shardNodes.isEmpty())
return null;
ShardState shardNode0 = shardNodes.iterator().next();
ShardInstance node0Shard = shardNode0.getShardInstance();
Shard shard = node0Shard.getShard();
String shardKey = shard.getFloc().getShardMethod().name() + "~" + shard.getFloc().getNumberOfShards() + "~" + shard.getInstance();
public SolrHost selectNode(ShardSet shardSet, int shardId, SolrHost node) {
String shardKey = shardSet.getCore() + "-" + shardId;
PersistedNode backupNode = (PersistedNode) this.attributeService.getAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
this.logger.debug("Found backup node: {}", backupNode);
logger.debug("Found backup node: {}", backupNode);
if (backupNode == null || backupNode.isExpired()) {
ShardInstance backupShardInstance = this.sds.computeLeadShard(shardNodes);
backupNode = new PersistedNode(new Node(backupShardInstance));
backupNode = new PersistedNode(node, this.persistTimeMinutes);
this.attributeService.setAttribute(backupNode, Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
}
return backupNode.getNode();
}
@@ -59,45 +44,9 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
this.attributeService.removeAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE);
}
public void forget(ShardState shardNode) {
ShardInstance nodeShard = shardNode.getShardInstance();
Shard shard = nodeShard.getShard();
String shardKey = shard.getFloc().getShardMethod().name() + "~" + shard.getFloc().getNumberOfShards() + "~" + shard.getInstance();
public void forget(ShardSet shardSet, int shardId) {
String shardKey = shardSet.getCore() + "-" + shardId;
this.attributeService.removeAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
}
private class PersistedNode implements Serializable {
private static final long serialVersionUID = 4105196543023419818L;
private final Node node;
private long expireTimeMillis;
PersistedNode(Node node) {
this.node = node;
this.reset();
}
void reset() {
this.expireTimeMillis = System.currentTimeMillis() + persistTimeMinutes * 60L * 1000L;
}
boolean isExpired() {
return this.expireTimeMillis < System.currentTimeMillis();
}
Node getNode() {
return this.node;
}
@Override
public String toString() {
return "node: " + this.node + "; expires in: " + (System.currentTimeMillis() - this.expireTimeMillis) + " ms";
}
}
}

View File

@@ -1,159 +0,0 @@
package com.inteligr8.alfresco.asie.service;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardRegistry;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.model.ShardSet;
@Component
public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
@Qualifier(Constants.QUALIFIER_ASIE)
private ShardRegistry shardRegistry;
public ShardInstance computeLeadShard(Collection<ShardState> shardNodesCache) {
if (shardNodesCache.isEmpty())
return null;
long latestTime = 0L;
ShardInstance latestNode = null;
for (ShardState shardNodeCache : shardNodesCache) {
if (latestTime < shardNodeCache.getLastIndexedTxCommitTime()) {
latestNode = shardNodeCache.getShardInstance();
latestTime = shardNodeCache.getLastIndexedTxCommitTime();
}
}
return latestNode;
}
public Set<ShardState> findByNode(String nodeHostname, int nodePort) {
Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
if (flocs.isEmpty())
return Collections.emptySet();
Set<ShardState> shards = new HashSet<>();
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
for (Entry<Shard, Set<ShardState>> flocShard : floc.getValue().entrySet()) {
for (ShardState shardState : flocShard.getValue()) {
ShardInstance shardInstance = shardState.getShardInstance();
if (!nodeHostname.equalsIgnoreCase(shardInstance.getHostName())) {
InetAddress nodeAddress = this.resolve(nodeHostname);
if (nodeAddress == null)
continue;
InetAddress shardInstanceAddress = this.resolve(shardInstance.getHostName());
if (!nodeAddress.equals(shardInstanceAddress))
continue;
}
if (nodePort == shardInstance.getPort())
shards.add(shardState);
}
}
}
return shards;
}
public Map<Shard, Set<ShardState>> findByShardSet(ShardSet shardSet) {
Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
if (flocs.isEmpty())
return Collections.emptyMap();
this.logger.trace("Found {} shard sets", flocs.size());
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
if (!floc.getKey().getShardMethod().equals(shardSet.getMethod()))
continue;
if (!shardSet.getConfig().isEmpty()) {
if (floc.getValue().isEmpty())
continue;
Shard firstShard = floc.getValue().keySet().iterator().next();
Set<ShardState> firstShardStates = floc.getValue().get(firstShard);
if (firstShardStates == null || firstShardStates.isEmpty())
continue;
ShardState firstShardState = firstShardStates.iterator().next();
Map<String, String> firstShardProps = firstShardState.getPropertyBag();
if (!shardSet.isConfigurationFor(firstShardProps))
continue;
}
return floc.getValue();
}
return Collections.emptyMap();
}
public Collection<Pair<Floc, Map<Shard, Set<ShardState>>>> findByShardMethod(ShardMethodEnum shardMethod) {
Map<Floc, Map<Shard, Set<ShardState>>> flocs = this.shardRegistry.getFlocs();
if (flocs.isEmpty())
return Collections.emptyList();
this.logger.trace("Found {} shard sets", flocs.size());
List<Pair<Floc, Map<Shard, Set<ShardState>>>> filteredFlocs = new LinkedList<>();
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : flocs.entrySet()) {
if (!floc.getKey().getShardMethod().equals(shardMethod))
continue;
filteredFlocs.add(new Pair<>(floc.getKey(), floc.getValue()));
}
return filteredFlocs;
}
public <T> Set<T> filterByShard(Map<Shard, Set<T>> shards, int shardId) {
if (shards == null)
return null;
for (Entry<Shard, Set<T>> shard : shards.entrySet()) {
if (shard.getKey().getInstance() == shardId)
return shard.getValue();
}
return Collections.emptySet();
}
public Set<ShardState> findByShard(ShardSet shardSet, int shardId) {
Map<Shard, Set<ShardState>> shards = this.findByShardSet(shardSet);
return this.filterByShard(shards, shardId);
}
private InetAddress resolve(String hostname) {
try {
return InetAddress.getByName(hostname);
} catch (UnknownHostException uhe) {
return null;
}
}
}

View File

@@ -1,17 +1,14 @@
package com.inteligr8.alfresco.asie.spi;
import java.util.Collection;
import org.alfresco.repo.index.shard.ShardState;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
public interface ShardBackupService {
Node fetchNode(Collection<ShardState> shardNodes);
SolrHost selectNode(ShardSet shardSet, int shardId, SolrHost bestNode);
void forget();
void forget(ShardState shardNode);
void forget(ShardSet shardSet, int shardId);
}

View File

@@ -1,72 +1,132 @@
package com.inteligr8.alfresco.asie.spi;
import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.util.Pair;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
public interface ShardDiscoveryService {
/**
* Determine the lead shard in the specified node/shard snapshot metadata.
*
* @param shardNodes A collection of snapshot metadata.
* @param shardNodeStates A collection of shard node/hosts and snapshot metadata.
* @return A single node/shard holding the latest snapshot metadata.
*/
ShardInstance computeLeadShard(Collection<ShardState> shardNodes);
default SolrHost computeLeadNode(Collection<Pair<SolrHost, ShardInstanceState>> shardNodeStates) {
if (shardNodeStates.isEmpty())
return null;
OffsetDateTime latestTime = OffsetDateTime.MIN;
SolrHost latestNode = null;
for (Pair<SolrHost, ShardInstanceState> shardNodeState : shardNodeStates) {
if (latestTime.isBefore(shardNodeState.getSecond().getLastIndexedTxTime())) {
latestNode = shardNodeState.getFirst();
latestTime = shardNodeState.getSecond().getLastIndexedTxTime();
}
}
return latestNode;
}
/**
* Find the shard set by the specified core name.
*
* @param core A core name (without the shard identifier suffix).
* @return A shard set; null if not found.
*/
ShardSet findSetByCore(String core);
/**
* Find the node with the specified hostname/port.
*
* @param nodeHostname The hostname of a ASIE node.
* @param nodePort The port of an ASIE node.
* @return A sharded node/host; null if not found.
*/
SolrHost findNode(String nodeHostname, int nodePort);
/**
* Find the latest snapshot of each shard on the specified node.
*
* @param nodeHostname The hostname of a ASIE node.
* @param nodePort The port of an ASIE node.
* @param node A sharded node/host.
* @return A set of the latest snapshot metadata of shards.
*/
Set<ShardState> findByNode(String nodeHostname, int nodePort);
Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node);
/**
* Find the shards, their nodes, and the latest snapshot of each within the
* specified shard set.
* Find all shard sets that support the specified shard methods.
*
* @param shardMethods An array of shard methods.
* @return A set of shard sets.
*/
Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods);
/**
* Find the shard node/hosts for the specified shard set.
*
* @param shardSet A shard set.
* @return A map of shards to sets of the latest snapshot metadata of those shards and their nodes.
* @return A set of shard node/hosts.
*/
Map<Shard, Set<ShardState>> findByShardSet(ShardSet shardSet);
Set<SolrHost> findNodes(ShardSet shardSet);
/**
* Find the shards, their nodes, and the latest snapshot of each using the
* specified shard method.
* Find the shard node/hosts for the specified shard set and identifier.
*
* @param shardMethod A shard method.
* @return A collection of maps of shards to sets of the latest snapshot metadata of those shards and their nodes.
* @param shardSet A shard set.
* @param shardId A shard identifier (e.g. 0).
* @return A set of shard node/hosts.
*/
Collection<Pair<Floc, Map<Shard, Set<ShardState>>>> findByShardMethod(ShardMethodEnum shardMethod);
Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId);
Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet);
/**
* Filter the latest snapshot of each shard.
*
* @param shards A map of shards to sets of the latest snapshot metadata of those shards and their nodes.
* @param shardId A 0-based index of a shard.
* @return A set of the latest snapshot metadata of shards.
* Find the shard node/hosts and their states for the specified shard set
* and identifier. The list is left in an unknown order, but it is a list
* for easy use of a Comparator for sorting.
*
* @param shardSet A shard set.
* @param shardId A shard identifier (e.g. 0).
* @return A list of shard node/hosts and their latest state.
*/
<T> Set<T> filterByShard(Map<Shard, Set<T>> shards, int shardId);
List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId);
/**
* Find the shard identifiers for the specified shard set and node/host.
*
* @param shardSet A shard set.
* @param node A shard ndoe/host.
* @return A set of shard identifiers.
*/
Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node);
/**
* Find the shards and their states for the specified shard set and node/host.
*
* @param shardSet A shard set.
* @param node A shard ndoe/host.
* @return A map of shards and their states.
*/
Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node);
/**
* Find the latest snapshot of each shard and their nodes within the
* specified shard set.
*
* @param shardSet A shard set.
* @param shardId A 0-based index of a shard.
* @return A set of the latest snapshot metadata of shards.
*/
Set<ShardState> findByShard(ShardSet shardSet, int shardId);
public class ShardedNodeShardStateComparator implements Comparator<Pair<SolrHost, ShardInstanceState>> {
@Override
public int compare(Pair<SolrHost, ShardInstanceState> p1, Pair<SolrHost, ShardInstanceState> p2) {
return - Long.compare(p1.getSecond().getLastIndexedTxId(), p2.getSecond().getLastIndexedTxId());
}
}
}

View File

@@ -0,0 +1,9 @@
package com.inteligr8.alfresco.asie.spi;
import org.alfresco.repo.index.shard.ShardInstance;
public interface ShardRegistry extends org.alfresco.repo.index.shard.ShardRegistry {
void unregisterShardInstance(ShardInstance shardInstance);
}

View File

@@ -1,18 +1,13 @@
package com.inteligr8.alfresco.asie.spi;
import java.io.Serializable;
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
public interface ShardStateService {
/**
* Clears the shard state.
*
* This allows the framework to provide an alternative implementation from
* what is provided by Alfresco Enterprise, which sometimes fails.
*/
void clear();
void remove(Serializable... keys);
void iterate(AttributeQueryCallback callback);
}

View File

@@ -8,10 +8,8 @@
<p>Retrieve a reference to the ASIE node that should be used for the backup of the specified ASIE shard registered with ACS.</p>
<p>The following path parameters are expected:</p>
<dl>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
</dl>
@@ -34,12 +32,15 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/shard/{shardSet}/{shardId}/backup</url>
<url>/inteligr8/asie/shard/{shardCore}/{shardId}/backup</url>
<format default="json">any</format>
<!-- Security -->
<authentication>none</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>
<never>false</never>

View File

@@ -8,10 +8,8 @@
<p>Retrieve a reference to the most current/up-to-date ASIE node for the specified ASIE shard registered with ACS.</p>
<p>The following path parameters are expected:</p>
<dl>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
</dl>
@@ -30,12 +28,15 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/shard/{shardSet}/{shardId}/lead</url>
<url>/inteligr8/asie/shard/{shardCore}/{shardId}/lead</url>
<format default="json">any</format>
<!-- Security -->
<authentication>none</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>
<never>false</never>

View File

@@ -29,7 +29,10 @@
<url>/inteligr8/asie/node/{nodeEndpoint}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -58,7 +58,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -31,7 +31,7 @@
<url>/inteligr8/asie/node/{nodeEndpoint}?coreName={coreName?}&amp;shardRange={shardRange?}&amp;template={template?}&amp;shardCount={shardCount?}&amp;nodeId={nodeId?}&amp;nodeCount={nodeCount?}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -12,12 +12,10 @@
<dl>
<dt>nodeEndpoint</dt>
<dd>A hostname or hostname:port for the ASIE node</dd>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
<dd>A numeric shard ID for the ASIE shard (e.g. 0)</dd>
</dl>
<p>The following status codes should be expected:</p>
<dl>
@@ -31,10 +29,13 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardSet}/{shardId}</url>
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -0,0 +1,41 @@
<webscript xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://bitbucket.org/!api/2.0/snippets/inteligr8/AzMgbp/80fdd26a6b3769a63cdc6b54bf1f39e378545cf7/files/snippet.txt">
<!-- Naming & Organization -->
<shortname>Retrieves ASIE Node Shard Status</shortname>
<family>Inteligr8 ASIE</family>
<description><![CDATA[
<p>Retrieves meta-data about all shards on a single ASIE node as registred with ACS.</p>
<p>The following query parameter is supported:</p>
<dl>
<dt>nodeEndpoint</dt>
<dd>A hostname or hostname:port for the ASIE node; dots are not allowed, you may use _ (underscore) instead</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A numeric shard ID for the ASIE shard (e.g. 0)</dd>
</dl>
<p>The following status codes should be expected:</p>
<dl>
<dt>200</dt>
<dd>OK</dd>
<dt>400</dt>
<dd>The path parameters are invalid</dd>
<dt>404</dt>
<dd>The specified ASIE node/shard could not be found</dd>
</dl>
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>user</authentication>
<!-- Functionality -->
<cache>
<never>false</never>
<public>false</public>
</cache>
</webscript>

View File

@@ -2,7 +2,7 @@
xsi:noNamespaceSchemaLocation="https://bitbucket.org/!api/2.0/snippets/inteligr8/AzMgbp/80fdd26a6b3769a63cdc6b54bf1f39e378545cf7/files/snippet.txt">
<!-- Naming & Organization -->
<shortname>Adds ASIE Node to Registry</shortname>
<shortname>Adds ASIE Node/Shard to Registry</shortname>
<family>Inteligr8 ASIE</family>
<description><![CDATA[
<p>Loads an ASIE shard on a single ASIE node, which will eventually register with ACS.</p>
@@ -30,7 +30,10 @@
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -54,7 +54,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -61,7 +61,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -21,7 +21,10 @@
<url>/inteligr8/asie/nodes</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -47,7 +47,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -8,12 +8,10 @@
<p>Retrieve meta-data about the specified ASIE shard registered with ACS.</p>
<p>The following path and query parameters are expected or supported:</p>
<dl>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
<dd>A numeric shard ID for the ASIE shard (e.g. 0)</dd>
<dt>sampleHashType</dt>
<dd>A sample hash type; Sample hash types: PropertyYear, PropertyQuarter, PropertyMonth, PropertyWeek</dd>
</dl>
@@ -58,11 +56,11 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/shard/{shardSet}/{shardId}?includeSampleHashes={includeSampleHashes?}</url>
<url>/inteligr8/asie/shard/{shardCore}/{shardId}?includeSampleHashes={includeSampleHashes?}</url>
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -57,7 +57,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -2,7 +2,7 @@
# defaulting to 3 days = 60 * 24 * 3 = 4320
inteligr8.asie.backup.persistTimeMinutes=4320
inteligr8.asie.allowedAuthorities=ALFRESCO_ADMINISTRATORS
inteligr8.asie.allowedAuthorities=GROUP_ALFRESCO_ADMINISTRATORS
# same as solr.baseUrl, but that property is private to the Search subsystem
inteligr8.asie.basePath=/solr

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.1-SNAPSHOT</version>
<version>1.2-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>