18 Commits

Author SHA1 Message Date
35bae4283d get authorities from AuthorityService 2025-01-09 11:53:48 -05:00
d537c8ec49 logging authority for debugging 2025-01-09 11:05:46 -05:00
f17556835a fix afterPropertiesSet() 2025-01-08 17:10:33 -05:00
4531c7af8e changed admin to user auth; using configurable auth 2025-01-08 16:51:47 -05:00
692410f535 moved ASIE custom authorization to AbstractWebScript 2025-01-08 16:47:35 -05:00
1230a07a5a added transaction wrapper to REST declaration 2025-01-08 14:52:34 -05:00
47835d852f wrapped attributeService in tx 2025-01-08 14:33:14 -05:00
7535475581 refactored PersistedNode for serialization 2025-01-08 13:52:58 -05:00
14887ca167 renamed path variables from shardSet to shardCore 2025-01-08 11:55:14 -05:00
632900ecee Merge branch 'feature/community' into develop 2024-11-14 11:02:40 -05:00
af7c9e148e compiling/running module (untested) 2024-11-14 11:01:49 -05:00
de8e0bf2d7 update from refactoring (incomplete) 2024-11-13 18:03:22 -05:00
006597f6fb Merge branch 'develop' into feature/community 2024-11-13 18:02:53 -05:00
3ecbf006dd added query parsing (incomplete) 2024-11-06 13:24:54 -05:00
07d6e63457 Merge branch 'develop' into feature/community 2024-11-06 13:24:23 -05:00
01d2f5ce23 fix v1.1.x pom 2024-11-01 08:35:38 -04:00
8a0db9f11d Merge branch 'develop' into feature/community 2024-11-01 08:35:03 -04:00
3e544c125b initial community-module (incomplete/breaking) 2024-10-31 14:55:42 -04:00
47 changed files with 2234 additions and 100 deletions

12
community-module/.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
# Maven
target
pom.xml.versionsBackup
# Eclipse
.project
.classpath
.settings
.vscode
# IDEA
/.idea/

View File

@@ -0,0 +1 @@
# ASIE Platform Module Library

Binary file not shown.

106
community-module/pom.xml Normal file
View File

@@ -0,0 +1,106 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.2-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>asie-community-platform-module</artifactId>
<packaging>jar</packaging>
<name>ASIE Platform Module for ACS Community</name>
<properties>
<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
<alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
<tomcat-rad.version>10-2.1</tomcat-rad.version>
<beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>acs-community-packaging</artifactId>
<version>${alfresco.platform.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>cachext-platform-module</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-shared</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Needed by this module, but provided by ACS -->
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-repository</artifactId>
<scope>provided</scope>
</dependency>
<!-- Alfresco Modules required to use this module -->
<dependency>
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>cxf-jaxrs-platform-module</artifactId>
<version>1.3.1-acs-v23.3</version>
<type>amp</type>
</dependency>
<!-- Including for testing purposes only -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>io.repaint.maven</groupId>
<artifactId>tiles-maven-plugin</artifactId>
<version>2.40</version>
<extensions>true</extensions>
<configuration>
<tiles>
<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-search-rad-tile -->
<tile>com.inteligr8.ootbee:beedk-acs-search-rad-tile:[1.1.6,2.0.0)</tile>
<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-self-rad-tile -->
<tile>com.inteligr8.ootbee:beedk-acs-platform-self-rad-tile:[1.1.6,2.0.0)</tile>
<!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-module-tile -->
<tile>com.inteligr8.ootbee:beedk-acs-platform-module-tile:[1.1.6,2.0.0)</tile>
</tiles>
</configuration>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>alfresco-public</id>
<url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
</repository>
</repositories>
</project>

74
community-module/rad.ps1 Normal file
View File

@@ -0,0 +1,74 @@
# Rapid Application Development (RAD) helper — PowerShell counterpart of rad.sh.
# Usage: .\rad.ps1 [ start | start_log | stop | restart | rebuild | tail {container} | containers ]

function discoverArtifactId {
    # Ask Maven for the project artifactId; -q and -DforceStdout keep the output machine-readable.
    $script:ARTIFACT_ID=(mvn -q -Dexpression=project"."artifactId -DforceStdout help:evaluate)
}

function rebuild {
    echo "Rebuilding project ..."
    # process-test-classes (not process-classes) so test resources are built too,
    # matching the rad.sh counterpart of this script.
    mvn process-test-classes
}

function start_ {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    # -Drad activates the RAD profile that also brings up the Docker containers.
    mvn -Drad process-test-classes
}

function start_log {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    # Same as start_, but stream container logs to the console.
    mvn -Drad "-Ddocker.showLogs" process-test-classes
}

function stop_ {
    discoverArtifactId
    echo "Stopping Docker containers that supported rapid application development ..."
    docker container ls --filter name=${ARTIFACT_ID}-*
    echo "Stopping containers ..."
    docker container stop (docker container ls -q --filter name=${ARTIFACT_ID}-*)
    echo "Removing containers ..."
    docker container rm (docker container ls -aq --filter name=${ARTIFACT_ID}-*)
}

function tail_logs {
    param (
        $container
    )
    discoverArtifactId
    # Follow the logs of the single container whose name matches {artifactId}-{container}.
    docker container logs -f (docker container ls -q --filter name=${ARTIFACT_ID}-${container})
}

function list {
    discoverArtifactId
    docker container ls --filter name=${ARTIFACT_ID}-*
}

switch ($args[0]) {
    "start" {
        start_
    }
    "start_log" {
        start_log
    }
    "stop" {
        stop_
    }
    "restart" {
        stop_
        start_
    }
    "rebuild" {
        rebuild
    }
    "tail" {
        tail_logs $args[1]
    }
    "containers" {
        list
    }
    default {
        echo "Usage: .\rad.ps1 [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
    }
}
echo "Completed!"

71
community-module/rad.sh Normal file
View File

@@ -0,0 +1,71 @@
#!/bin/sh
# Rapid Application Development (RAD) helper — POSIX shell counterpart of rad.ps1.
# Usage: ./rad.sh [ start | start_log | stop | restart | rebuild | tail {container} | containers ]

discoverArtifactId() {
    # Ask Maven for the project artifactId; the sed strips ANSI color escapes Maven may emit.
    ARTIFACT_ID=$(mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g')
}

rebuild() {
    echo "Rebuilding project ..."
    mvn process-test-classes
}

start() {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    # -Drad activates the RAD profile that also brings up the Docker containers.
    mvn -Drad process-test-classes
}

start_log() {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    # Same as start, but stream container logs to the console.
    mvn -Drad -Ddocker.showLogs process-test-classes
}

stop() {
    discoverArtifactId
    echo "Stopping Docker containers that supported rapid application development ..."
    # Quote the filter so the shell never pathname-expands the trailing '*'.
    docker container ls --filter "name=${ARTIFACT_ID}-*"
    echo "Stopping containers ..."
    docker container stop $(docker container ls -q --filter "name=${ARTIFACT_ID}-*")
    echo "Removing containers ..."
    docker container rm $(docker container ls -aq --filter "name=${ARTIFACT_ID}-*")
}

tail_logs() {
    discoverArtifactId
    # Follow the logs of the single container whose name matches {artifactId}-{container}.
    docker container logs -f $(docker container ls -q --filter "name=${ARTIFACT_ID}-$1")
}

list() {
    discoverArtifactId
    docker container ls --filter "name=${ARTIFACT_ID}-*"
}

case "$1" in
    start)
        start
        ;;
    start_log)
        start_log
        ;;
    stop)
        stop
        ;;
    restart)
        stop
        start
        ;;
    rebuild)
        rebuild
        ;;
    tail)
        tail_logs "$2"
        ;;
    containers)
        list
        ;;
    *)
        echo "Usage: ./rad.sh [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
        exit 1
esac
echo "Completed!"

View File

@@ -0,0 +1,23 @@
package com.inteligr8.alfresco.asie;

/**
 * Constants used by the ACS Community edition of the ASIE platform module,
 * extending the shared {@link Constants}.
 *
 * Interface fields are implicitly {@code public static final} (JLS 9.3), so
 * the redundant modifiers are omitted.
 */
public interface CommunityConstants extends Constants {

    // Spring bean names of the caches this module registers/consumes.
    String BEAN_SHARDSETS_CACHE = "asieShardsetsCache";
    String BEAN_NODES_CACHE = "asieNodesCache";
    String BEAN_SHARD_NODES_CACHE = "asieShardNodesCache";
    String BEAN_SHARDINST_STATE_CACHE = "asieShardInstanceStateCache";
    String BEAN_NODE_DISABLE_CACHE = "asieNodeDisabledCache";
    String BEAN_NODE_UNAVAIL_CACHE = "asieNodeUnavailableCache";
    String BEAN_SHARDINST_DISABLE_CACHE = "asieShardInstanceDisabledCache";
    String BEAN_SHARDINST_UNAVAIL_CACHE = "asieShardInstanceUnavailableCache";
    String BEAN_CORE_EXPLICIT_CACHE = "asieCoreExplicitCache";

    // Keys used with the Alfresco AttributeService for persisted ASIE state.
    String ATTR_ASIE_SHARDSET = "inteligr8.asie.shardSet";
    String ATTR_ASIE_NODE = "inteligr8.asie.node";
    String ATTR_ASIE_SHARD_NODES = "inteligr8.asie.shard.nodes";
    String ATTR_ASIE_SHARD_NODE = "inteligr8.asie.shard.node";

    // Secondary attribute key segments.
    String ATTR_OBJECT = "object";
    String ATTR_DISABLE = "disabled";
    String ATTR_NODES = "nodes";
}

View File

@@ -0,0 +1,50 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.List;
import java.util.Set;

import org.alfresco.repo.search.impl.parsers.CMISLexer;
import org.alfresco.repo.search.impl.parsers.CMISParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.springframework.stereotype.Component;

/**
 * A {@link QueryInspector} for the CMIS family of query languages.
 *
 * NOTE(review): this implementation is incomplete — it can parse a CMIS query
 * into an ANTLR tree, but {@link #findRequiredPropertyValues} does not yet walk
 * the tree and always throws {@link UnsupportedOperationException}.
 */
@Component
public class CmisQueryInspector implements QueryInspector {

    // The CMIS dialects this inspector claims to handle (immutable set).
    private Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_CMIS_ALFRESCO,
            SearchService.LANGUAGE_CMIS_STRICT,
            SearchService.LANGUAGE_INDEX_CMIS,
            SearchService.LANGUAGE_SOLR_CMIS);

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    /**
     * Unimplemented: parses the query (validating its syntax) and then throws.
     *
     * @throws UnsupportedOperationException always, after a successful parse
     * @throws RecognitionException if the query cannot be parsed as CMIS
     */
    @Override
    public List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        // Parsed tree is currently unused; tree-walking extraction is still TODO.
        Tree tree = this.parseCmis(query, defaultOperator);
        throw new UnsupportedOperationException();
    }

    /**
     * Parses a CMIS query string into an ANTLR syntax tree.
     *
     * @param cmisQuery a CMIS query string
     * @param defaultOperator currently ignored by the CMIS parser; accepted for interface symmetry
     * @return the root of the parsed tree
     * @throws RecognitionException if the query cannot be parsed
     */
    protected Tree parseCmis(String cmisQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(cmisQuery);
        CMISLexer lexer = new CMISLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        CMISParser parser = new CMISParser(tokens);
        CommonTree tree = (CommonTree) parser.query().getTree();
        return tree;
    }
}

View File

@@ -0,0 +1,290 @@
package com.inteligr8.alfresco.asie.compute;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * A {@link QueryInspector} for the Alfresco FTS / Lucene query languages.
 *
 * It parses a query with the Alfresco FTS ANTLR grammar and extracts the
 * values that a given property is REQUIRED to have for any document to match
 * (i.e. values appearing in the conjunctive, non-fuzzy part of the query).
 *
 * NOTE(review): the tree-walking below dispatches on node text ("DISJUNCTION",
 * "CONJUNCTION", "DEFAULT", "TERM", "FIELD_REF", ...) which are imaginary
 * tokens of the FTSParser grammar — confirm against the grammar version in use.
 */
@Component
public class FtsQueryInspector implements QueryInspector {

    private final Logger logger = LoggerFactory.getLogger(FtsQueryInspector.class);

    // The FTS dialects this inspector claims to handle (immutable set).
    private final Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_FTS_ALFRESCO,
            SearchService.LANGUAGE_INDEX_FTS_ALFRESCO,
            SearchService.LANGUAGE_SOLR_FTS_ALFRESCO,
            SearchService.LANGUAGE_LUCENE);

    @Autowired
    private NamespaceService namespaceService;

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    /**
     * Finds the values the given property is required to have by the query.
     *
     * @param ftsQuery an FTS query string
     * @param defaultOperator the operator joining terms that have no explicit operator
     * @param property the property (QName) whose required values are sought
     * @param dataTypeDef the declared data type of that property; drives value coercion
     * @return the required values; null when the query is a multi-term disjunction
     *         (no single value is required); may be empty
     * @throws RecognitionException if the query cannot be parsed
     */
    @Override
    public List<QueryValue> findRequiredPropertyValues(String ftsQuery, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        Tree tree = this.parseFts(ftsQuery, defaultOperator);
        tree = this.bypassSingleTermDisjunctions(tree);
        if (tree == null)
            return null;
        // narrow down: required terms -> only this property -> only definitive (exact/range) terms
        Collection<Tree> trees = this.extractRequiredTerms(tree);
        this.logger.trace("Found {} required terms in query: {}", trees.size(), ftsQuery);
        this.filterPropertyTerms(trees, property);
        this.logger.trace("Found {} required terms for property {} in query: {}", trees.size(), property, ftsQuery);
        this.filterOutFuzzyTerms(trees);
        this.logger.trace("Found {} required definitive terms for property {} in query: {}", trees.size(), property, ftsQuery);
        List<QueryValue> values = new ArrayList<>(trees.size());
        for (Tree t : trees)
            values.add(this.extractValue(t, dataTypeDef));
        return values;
    }

    /**
     * Parses an FTS query string into an ANTLR syntax tree.
     *
     * @param ftsQuery an FTS query string
     * @param defaultOperator AND forces default-conjunction mode; anything else default-disjunction
     * @return the root of the parsed tree
     * @throws RecognitionException if the query cannot be parsed
     */
    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(ftsQuery);
        FTSLexer lexer = new FTSLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FTSParser parser = new FTSParser(tokens);
        parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
        parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
        return tree;
    }

    /**
     * Descends through DISJUNCTION nodes that have exactly one child (which are
     * effectively transparent).
     *
     * @return the first non-trivial node; null if a true (multi-child)
     *         disjunction is reached, as it imposes no required value
     */
    protected Tree bypassSingleTermDisjunctions(Tree tree) {
        while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
            tree = tree.getChild(0);
        if ("DISJUNCTION".equals(tree.getText()))
            return null;
        return tree;
    }

    /**
     * Recursively collects the DEFAULT term nodes that must all hold
     * (children of conjunctions); true disjunctions contribute nothing.
     *
     * @return the required term nodes; never null, may be empty
     */
    protected Collection<Tree> extractRequiredTerms(Tree tree) {
        // same single-child bypass as above, applied at each recursion level
        while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
            tree = tree.getChild(0);
        List<Tree> terms = new LinkedList<>();
        switch (tree.getText()) {
            case "DISJUNCTION":
                // an OR of several alternatives requires none of them individually
                break;
            case "CONJUNCTION":
                for (int c = 0; c < tree.getChildCount(); c++) {
                    Collection<Tree> subtrees = this.extractRequiredTerms(tree.getChild(c));
                    if (subtrees == null || subtrees.isEmpty())
                        continue;
                    terms.addAll(subtrees);
                }
                break;
            case "DEFAULT":
                terms.add(tree);
                break;
            default:
                this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
        }
        return terms;
    }

    /**
     * Removes (in place) all term nodes that do not reference the given
     * property, matching both local name and any prefix of its namespace.
     *
     * @param trees the term nodes to filter; modified in place
     * @param property the property to keep terms for
     * @return the same (filtered) collection
     */
    protected Collection<Tree> filterPropertyTerms(Collection<Tree> trees, QName property) {
        if (trees.isEmpty())
            return trees;

        Set<String> prefixes = new HashSet<>(this.namespaceService.getPrefixes(property.getNamespaceURI()));
        if (prefixes.isEmpty()) {
            this.logger.warn("Unexpected/unsupported namespace: {}", property.getNamespaceURI());
            trees.clear();
            return trees;
        }

        Iterator<Tree> i = trees.iterator();
        while (i.hasNext()) {
            Tree tree = i.next();
            if ("DEFAULT".equals(tree.getText()))
                tree = tree.getChild(0);
            // 'skip' is the child index of the FIELD_REF node, i.e. the number
            // of value children that precede it for each term shape
            int skip = -1;
            switch (tree.getText()) {
                case "TERM":
                case "PHRASE":
                case "EXACT_TERM":
                case "EXACT_PHRASE":
                    skip = 1; // skip the value child
                    break;
                case "RANGE":
                    skip = 4; // skip the inclusive, start, end, inclusive children
                    break;
                default:
            }
            if (skip >= 0) {
                Tree fieldRef = tree.getChild(skip);
                if (!"FIELD_REF".equals(fieldRef.getText())) {
                    this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
                } else if (!fieldRef.getChild(0).getText().equals(property.getLocalName())) {
                    this.logger.trace("Found but ignoring property: {}", fieldRef.getChild(0).getText());
                } else {
                    Tree prefix = fieldRef.getChild(1);
                    if (!"PREFIX".equals(prefix.getText())) {
                        this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
                    } else if (!prefixes.contains(prefix.getChild(0).getText())) {
                        this.logger.trace("Found but ignoring property: {}:{}", prefix.getChild(0).getText(), property.getLocalName());
                    } else {
                        // this will skip the remove()
                        continue;
                    }
                }
            }
            i.remove();
        }
        return trees;
    }

    /**
     * Removes (in place) all term nodes that are not definitive — keeping only
     * exact terms/phrases and ranges; plain TERM/PHRASE nodes are fuzzy.
     *
     * @param trees the term nodes to filter; modified in place
     * @return the same (filtered) collection
     */
    protected Collection<Tree> filterOutFuzzyTerms(Collection<Tree> trees) {
        if (trees.isEmpty())
            return trees;

        Iterator<Tree> i = trees.iterator();
        while (i.hasNext()) {
            Tree tree = i.next();
            if ("DEFAULT".equals(tree.getText()))
                tree = tree.getChild(0);
            switch (tree.getText()) {
                case "EXACT_TERM":
                case "EXACT_PHRASE":
                case "RANGE":
                    break;
                default:
                    i.remove();
            }
        }
        return trees;
    }

    /**
     * Coerces a term node's raw text to a typed {@link QueryValue} based on
     * the property's declared data type; RANGE nodes delegate to
     * {@link #extractRangeValue}.  Unrecognized types fall back to String.
     */
    protected QueryValue extractValue(Tree tree, DataTypeDefinition dataTypeDef) {
        if ("DEFAULT".equals(tree.getText()))
            tree = tree.getChild(0);
        switch (tree.getText()) {
            case "RANGE":
                return this.extractRangeValue(tree, dataTypeDef);
            default:
        }

        String value = this.unquote(tree.getChild(0).getText());
        switch (dataTypeDef.getName().getLocalName()) {
            case "boolean":
                return new QuerySingleValue<Boolean>(Boolean.parseBoolean(value));
            case "double":
                return new QuerySingleValue<Double>(Double.parseDouble(value));
            case "float":
                return new QuerySingleValue<Float>(Float.parseFloat(value));
            case "int":
                return new QuerySingleValue<Integer>(Integer.parseInt(value));
            case "long":
                return new QuerySingleValue<Long>(Long.parseLong(value));
            case "date":
                return new QuerySingleValue<LocalDate>(this.evaluateAsDate(value));
            case "datetime":
                return new QuerySingleValue<LocalDateTime>(this.evaluateAsDateTime(value));
            case "period":
                return new QuerySingleValue<Period>(Period.parse(value));
            case "qname":
                return new QuerySingleValue<QName>(QName.createQName(value, this.namespaceService));
            case "noderef":
                return new QuerySingleValue<NodeRef>(new NodeRef(value));
            case "childassocref":
                return new QuerySingleValue<ChildAssociationRef>(new ChildAssociationRef(value));
            case "assocref":
                return new QuerySingleValue<AssociationRef>(new AssociationRef(value));
            case "locale":
                return new QuerySingleValue<Locale>(new Locale(value));
            default:
                return new QuerySingleValue<String>(value);
        }
    }

    /**
     * Coerces a RANGE node — children: [inclusivity, start, end, inclusivity]
     * — to a typed {@link QueryRangeValue}.
     *
     * @throws UnsupportedOperationException for non-ordered data types
     */
    protected QueryRangeValue<?> extractRangeValue(Tree tree, DataTypeDefinition dataTypeDef) {
        boolean includeStart = "INCLUSIVE".equals(tree.getChild(0).getText());
        String start = this.unquote(tree.getChild(1).getText());
        String end = this.unquote(tree.getChild(2).getText());
        boolean includeEnd = "INCLUSIVE".equals(tree.getChild(3).getText());

        switch (dataTypeDef.getName().getLocalName()) {
            case "double":
                return new QueryRangeValue<Double>(includeStart, Double.parseDouble(start), includeEnd, Double.parseDouble(end));
            case "float":
                return new QueryRangeValue<Float>(includeStart, Float.parseFloat(start), includeEnd, Float.parseFloat(end));
            case "int":
                return new QueryRangeValue<Integer>(includeStart, Integer.parseInt(start), includeEnd, Integer.parseInt(end));
            case "long":
                return new QueryRangeValue<Long>(includeStart, Long.parseLong(start), includeEnd, Long.parseLong(end));
            case "date":
                return new QueryRangeValue<LocalDate>(includeStart, this.evaluateAsDate(start), includeEnd, this.evaluateAsDate(end));
            case "datetime":
                return new QueryRangeValue<LocalDateTime>(includeStart, this.evaluateAsDateTime(start), includeEnd, this.evaluateAsDateTime(end));
            default:
                throw new UnsupportedOperationException("The data type does not make sense for range evaluation: " + dataTypeDef.getName());
        }
    }

    // "now" (case-insensitive) resolves to the current date; otherwise ISO-8601 parse.
    protected LocalDate evaluateAsDate(String str) {
        if ("now".equalsIgnoreCase(str)) return LocalDate.now();
        else return LocalDate.parse(str);
    }

    // "now" (case-insensitive) resolves to the current date-time; otherwise ISO-8601 parse.
    protected LocalDateTime evaluateAsDateTime(String str) {
        if ("now".equalsIgnoreCase(str)) return LocalDateTime.now();
        else return LocalDateTime.parse(str);
    }

    // Strips one matching pair of surrounding single or double quotes, if present.
    protected String unquote(String str) {
        if (str.length() < 2) return str;
        else if (str.charAt(0) == '\'' && str.charAt(str.length()-1) == '\'') return str.substring(1, str.length()-1);
        else if (str.charAt(0) == '\"' && str.charAt(str.length()-1) == '\"') return str.substring(1, str.length()-1);
        else return str;
    }
}

View File

@@ -0,0 +1,74 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.List;
import java.util.Set;

import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.RecognitionException;

/**
 * Inspects a search query in a specific query language, extracting the values
 * a given property is required to have for any result to match.
 */
public interface QueryInspector {

    /**
     * @return the query languages this inspector can handle
     */
    Set<String> getSupportedLanguages();

    /**
     * Finds the values the given property is required to have by the query.
     *
     * @param query a query string in one of the supported languages
     * @param defaultOperator the operator joining terms without an explicit operator
     * @param property the property whose required values are sought
     * @param dataTypeDef the declared data type of that property
     * @return the required values; implementations may return null when no
     *         value is strictly required
     * @throws RecognitionException if the query cannot be parsed
     */
    List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException;

    /** Marker for a value extracted from a query. */
    public interface QueryValue {
    }

    /** A single (immutable) required value. */
    public class QuerySingleValue<T> implements QueryValue {

        private final T value;

        public QuerySingleValue(T value) {
            this.value = value;
        }

        public T getValue() {
            return value;
        }

        @Override
        public String toString() {
            // String.valueOf() avoids an NPE when the wrapped value is null
            return String.valueOf(this.value);
        }
    }

    /** An (immutable) required range of values, with per-bound inclusivity. */
    public class QueryRangeValue<T> implements QueryValue {

        private final boolean includeStart;
        private final T start;
        private final boolean includeEnd;
        private final T end;

        public QueryRangeValue(boolean includeStart, T start, boolean includeEnd, T end) {
            this.includeStart = includeStart;
            this.start = start;
            this.includeEnd = includeEnd;
            this.end = end;
        }

        public boolean isIncludeStart() {
            return includeStart;
        }

        public boolean isIncludeEnd() {
            return includeEnd;
        }

        public T getStart() {
            return start;
        }

        public T getEnd() {
            return end;
        }
    }
}

View File

@@ -0,0 +1,32 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.alfresco.service.cmr.search.SearchParameters;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Builds a lookup of {@link QueryInspector} beans keyed by each query language
 * they support, and resolves the inspector for a given set of search
 * parameters.
 */
@Component
public class QueryInspectorFactory implements InitializingBean {

    @Autowired
    private List<QueryInspector> inspectors;

    // language => the inspector that handles it; populated once at startup
    private final Map<String, QueryInspector> inspectorsByLanguage = new HashMap<>();

    /**
     * Registers every discovered inspector under each of its supported
     * languages; a language claimed by multiple inspectors keeps the last one.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        this.inspectors.forEach(candidate ->
                candidate.getSupportedLanguages().forEach(language ->
                        this.inspectorsByLanguage.put(language, candidate)));
    }

    /**
     * @param searchParams the search whose language determines the inspector
     * @return the inspector for the search's language; null if unsupported
     */
    public QueryInspector selectQueryInspector(SearchParameters searchParams) {
        return this.inspectorsByLanguage.get(searchParams.getLanguage());
    }
}

View File

@@ -0,0 +1,28 @@
package com.inteligr8.alfresco.asie.provider;

import org.alfresco.repo.index.shard.ShardRegistry;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;

import com.inteligr8.alfresco.asie.Constants;

/**
 * Exposes a single, ASIE-qualified {@link ShardRegistry} bean selected from
 * those already registered in the Spring context.
 */
@Configuration
public class ShardRegistryProvider extends AbstractProvider<ShardRegistry> {

    /**
     * This allows for the selection of the primary or first ShardRegistry
     * registered in the Spring BeanFactory.
     *
     * @return A ShardRegistry.
     */
    @Bean(Constants.BEAN_SHARD_REGISTRY)
    @Qualifier(Constants.QUALIFIER_ASIE)
    @Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
    public ShardRegistry selectBean() {
        // delegate to AbstractProvider to resolve the @Primary (or first) candidate
        return this.getPrimary(ShardRegistry.class);
    }
}

View File

@@ -0,0 +1,240 @@
package com.inteligr8.alfresco.asie.service;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.alfresco.util.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.CommunityConstants;
import com.inteligr8.alfresco.asie.model.Shard;
import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.cachext.CollectionCache;
import com.inteligr8.alfresco.cachext.MultiValueCache;

/**
 * Discovers shard sets, shards, Solr nodes, and shard-instance states by
 * querying the ASIE caches (populated elsewhere in this module).
 *
 * All lookups here iterate cache keys linearly; callers should not assume
 * these are indexed lookups.
 */
@Component
public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // core name => shard set descriptor
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
    private SimpleCache<String, ShardSet> shardsetsCache;

    // node spec => Solr host descriptor
    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODES_CACHE)
    private SimpleCache<String, SolrHost> nodesCache;

    // shard => the Solr hosts serving it
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
    private MultiValueCache<Shard, SolrHost> shardNodesCache;

    // shard instance => its last known state
    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
    private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;

    // NOTE(review): the four caches below are injected but not referenced by
    // any method in this class — presumably reserved for availability
    // filtering; verify before removing.
    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;

    @Autowired
    @Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;

    // core name => explicitly configured shard-id property (explicit routing)
    @Autowired
    @Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
    private SimpleCache<String, QName> coreExplicitIdCache;

    /**
     * @return the shard set registered for the core; null if unknown
     */
    @Override
    public ShardSet findSetByCore(String core) {
        return this.shardsetsCache.get(core);
    }

    /**
     * Finds a cached Solr node by hostname and port.  When the hostname does
     * not match textually (case-insensitive), both names are resolved via DNS
     * and compared by address, so aliases of the same host still match.
     *
     * @return the matching node; null if none matches
     */
    @Override
    public SolrHost findNode(String nodeHostname, int nodePort) {
        // memoize DNS lookups for the duration of this call; null means unresolvable
        Map<String, InetAddress> resolvedAddresses = new HashMap<>();

        for (String nodeSpec : this.nodesCache.getKeys()) {
            SolrHost node = this.nodesCache.get(nodeSpec);
            if (!nodeHostname.equalsIgnoreCase(node.getHostname())) {
                if (!resolvedAddresses.containsKey(nodeHostname))
                    resolvedAddresses.put(nodeHostname, this.resolve(nodeHostname));
                InetAddress nodeAddress = resolvedAddresses.get(nodeHostname);
                this.logger.trace("Resolved: {} => {}", nodeHostname, nodeAddress);
                if (nodeAddress == null)
                    continue;

                if (!resolvedAddresses.containsKey(node.getHostname()))
                    resolvedAddresses.put(node.getHostname(), this.resolve(node.getHostname()));
                InetAddress shardInstanceAddress = resolvedAddresses.get(node.getHostname());
                this.logger.trace("Resolved: {} => {}", node.getHostname(), shardInstanceAddress);
                if (!nodeAddress.equals(shardInstanceAddress))
                    continue;
            }

            if (nodePort == node.getPort()) {
                this.logger.debug("Found node: {}", node);
                return node;
            }
        }

        return null;
    }

    // DNS resolution that reports failure as null instead of throwing.
    private InetAddress resolve(String hostname) {
        try {
            return InetAddress.getByName(hostname);
        } catch (UnknownHostException uhe) {
            return null;
        }
    }

    /**
     * Maps each shard set hosted on the given node to the states of that
     * node's shard instances, keyed by shard id.
     *
     * NOTE(review): the shard set is looked up before the membership check;
     * if a shard's core is absent from the shardsets cache this can insert a
     * null map key — confirm the caches are always populated together.
     */
    @Override
    public Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node) {
        Map<ShardSet, Map<Integer, ShardInstanceState>> response = new HashMap<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            ShardSet shardSet = this.shardsetsCache.get(shard.extractShardSetCore());
            if (this.shardNodesCache.contains(shard, node)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);

                Map<Integer, ShardInstanceState> shards = response.get(shardSet);
                if (shards == null)
                    response.put(shardSet, shards = new HashMap<>());
                shards.put(shard.extractShardId(), state);
            }
        }

        return response;
    }

    /**
     * @return all shard sets whose sharding method is one of those given
     */
    @Override
    public Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods) {
        Set<ShardSet> shardSets = new HashSet<>();
        Set<ShardMethodEnum> methods = CollectionUtils.asSet(shardMethods);

        for (String core : this.shardsetsCache.getKeys()) {
            ShardSet shardSet = this.shardsetsCache.get(core);
            if (methods.contains(shardSet.getMethod()))
                shardSets.add(shardSet);
        }

        return shardSets;
    }

    /**
     * @return all nodes hosting any shard of the given shard set
     */
    @Override
    public Set<SolrHost> findNodes(ShardSet shardSet) {
        Set<SolrHost> nodes = new HashSet<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()))
                nodes.addAll(this.shardNodesCache.get(shard));
        }

        return nodes;
    }

    /**
     * @return all nodes hosting the specific shard of the given shard set
     */
    @Override
    public Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId) {
        Set<SolrHost> nodes = new HashSet<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()) && shardId == shard.extractShardId())
                nodes.addAll(this.shardNodesCache.get(shard));
        }

        return nodes;
    }

    /**
     * For each shard of the set, selects the node whose instance state orders
     * first by {@code ShardInstanceState.compareTo}.
     *
     * NOTE(review): {@code compareTo(latestState) < 0} keeps the SMALLEST
     * state per the natural ordering, yet the method is named "latest" —
     * confirm ShardInstanceState's natural ordering is newest-first.  Also,
     * {@code state} may be null when absent from the cache, which would NPE
     * here once {@code latestState} is non-null — confirm states always exist.
     */
    @Override
    public Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet) {
        Map<Integer, Pair<SolrHost, ShardInstanceState>> response = new HashMap<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
                continue;

            SolrHost latestNode = null;
            ShardInstanceState latestState = null;

            for (SolrHost node : this.shardNodesCache.get(shard)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
                if (latestState == null || state.compareTo(latestState) < 0) {
                    latestState = state;
                    latestNode = node;
                }
            }

            if (latestNode != null)
                response.put(shard.extractShardId(), new Pair<>(latestNode, latestState));
        }

        return response;
    }

    /**
     * Collects the node/state pairs for every instance of the specified shard.
     *
     * NOTE(review): the state may be null for instances missing from the
     * state cache; pairs with null states are still included.
     */
    @Override
    public List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId) {
        List<Pair<SolrHost, ShardInstanceState>> response = new LinkedList<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
                continue;

            for (SolrHost node : this.shardNodesCache.get(shard)) {
                ShardInstance shardNode = ShardInstance.from(shard, node);
                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
                response.add(new Pair<>(node, state));
            }
        }

        return response;
    }

    /**
     * @return the ids of the set's shards that the given node hosts
     */
    @Override
    public Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node) {
        Set<Integer> shardIds = new HashSet<>();

        for (Shard shard : this.shardNodesCache.getKeys()) {
            if (shardSet.getCore().equals(shard.extractShardSetCore()) && this.shardNodesCache.contains(shard, node))
                shardIds.add(shard.extractShardId());
        }

        return shardIds;
    }

    /**
     * Unimplemented stub: always returns null.
     */
    @Override
    public Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node) {
        // TODO Auto-generated method stub
        return null;
    }
}

View File

@@ -0,0 +1,25 @@
package com.inteligr8.alfresco.asie.service;

import org.alfresco.service.cmr.attributes.AttributeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.Constants;

/**
 * Community implementation of the shard state SPI; currently only supports
 * clearing all registered shard state by delegating to the shard registry.
 */
@Component
public class ShardStateService implements com.inteligr8.alfresco.asie.spi.ShardStateService {

    // NOTE(review): injected but unused in this class's visible code —
    // presumably needed for persisted-state operations still to come; verify.
    @Autowired
    @Qualifier(Constants.QUALIFIER_ASIE)
    private AttributeService attrService;

    @Autowired
    private SolrShardRegistry shardRegistry;

    /**
     * Discards all shard state tracked by the registry.
     */
    @Override
    public void clear() {
        this.shardRegistry.purge();
    }
}

View File

@@ -0,0 +1,628 @@
package com.inteligr8.alfresco.asie.service;
import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.OptionalInt;
import java.util.Random;
import java.util.Set;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.RecognitionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.annotation.Primary;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.CommunityConstants;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.compute.QueryInspector;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryRangeValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QuerySingleValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryValue;
import com.inteligr8.alfresco.asie.compute.QueryInspectorFactory;
import com.inteligr8.alfresco.asie.model.Shard;
import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.spi.ShardRegistry;
import com.inteligr8.alfresco.cachext.CollectionCache;
import com.inteligr8.alfresco.cachext.MultiValueCache;
@Component
@Primary
public class SolrShardRegistry extends AbstractLifecycleBean implements ShardRegistry {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private final Random random = new Random();
private final QName shardLock = QName.createQName(Constants.NAMESPACE_ASIE, "shardLock");
@Autowired
@Qualifier(Constants.QUALIFIER_ASIE)
private AttributeService attrService;
@Autowired
private NamespaceService namespaceService;
@Autowired
private DictionaryService dictionaryService;
@Autowired
private QueryInspectorFactory queryInspectorFactory;
@Autowired
private JobLockService jobLockService;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
private SimpleCache<String, ShardSet> shardsetsCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODES_CACHE)
private SimpleCache<String, SolrHost> nodesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
private MultiValueCache<Shard, SolrHost> shardNodesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;
@Autowired
@Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
private SimpleCache<String, QName> coreExplicitIdCache;
@Value("${inteligr8.asie.registerUnknownShardDisabled}")
private boolean registerDisabled;
@Value("${inteligr8.asie.offlineIdleShardInSeconds}")
private int offlineIdleShardInSeconds;
@Value("${inteligr8.asie.forgetOfflineShardInSeconds}")
private int forgetOfflineShardInSeconds;
@Override
protected void onBootstrap(ApplicationEvent event) {
    // hydrate the in-memory caches from persisted attributes at startup
    this.loadPersistedToCache();
}

@Override
protected void onShutdown(ApplicationEvent event) {
    // nothing to do; state is persisted as it changes (see registerShardState/persistCache)
}
/**
 * Hydrates all registry caches from the persisted attributes: shard sets, nodes,
 * shard-to-node mappings, and shard instance states (with their disabled flags).
 * Runs under a cluster-wide job lock so only one node mutates the registry at a time.
 */
protected void loadPersistedToCache() {
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        // (1) shard sets, persisted under [ATTR_ASIE_SHARDSET, core]
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                String core = (String) keys[1];
                if (!shardsetsCache.contains(core)) {
                    ShardSet shardSet = (ShardSet) value;
                    shardsetsCache.put(core, shardSet);
                    switch (shardSet.getMethod()) {
                        case EXPLICIT_ID:
                            // also prime the core => explicit-ID property mapping
                            cacheExplicitShard(shardSet, false);
                            break;
                        default:
                    }
                }
                return true; // continue iteration
            }
        }, CommunityConstants.ATTR_ASIE_SHARDSET);

        // (2) nodes, persisted under [ATTR_ASIE_NODE, ATTR_OBJECT, nodeSpec],
        // with disabled flags under [ATTR_ASIE_NODE, ATTR_DISABLE, nodeSpec]
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                String nodeSpec = (String) keys[2];
                SolrHost node = (SolrHost) value;
                if (!nodesCache.contains(nodeSpec))
                    nodesCache.put(nodeSpec, node);
                if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, nodeSpec))) {
                    if (!nodeDisableCache.contains(node))
                        nodeDisableCache.add(node);
                } else if (nodeDisableCache.contains(node)) {
                    nodeDisableCache.remove(node);
                }
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT);

        // (3) shard => node mappings, persisted under [ATTR_ASIE_SHARD_NODES, shard, nodeSpec]
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                Shard shard = (Shard) keys[1];
                SolrHost node = (SolrHost) keys[2];
                if (!shardNodesCache.contains(shard, node))
                    shardNodesCache.add(shard, node);
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_SHARD_NODES);

        // (4) shard instance states, with their disabled flags.
        // FIX: these values are persisted under ATTR_ASIE_SHARD_NODE (see
        // checkSetAttribute(state, shardNode) and persistShardInstanceCache());
        // the query previously used ATTR_ASIE_NODE, which re-read the SolrHost
        // entries and mis-cast them to ShardInstance/ShardInstanceState.
        this.attrService.getAttributes(new AttributeQueryCallback() {
            @Override
            public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
                ShardInstance shardNode = (ShardInstance) keys[2];
                ShardInstanceState state = (ShardInstanceState) value;
                if (!shardInstanceStatesCache.contains(shardNode))
                    shardInstanceStatesCache.put(shardNode, state);
                if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode))) {
                    if (!shardInstanceDisableCache.contains(shardNode))
                        shardInstanceDisableCache.add(shardNode);
                } else if (shardInstanceDisableCache.contains(shardNode)) {
                    shardInstanceDisableCache.remove(shardNode);
                }
                return true;
            }
        }, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT);
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Caches the mapping from a core name to its explicit shard ID property (as a QName).
 *
 * @param shardSet the EXPLICIT_ID shard set to map
 * @param overwrite when true, replaces any existing mapping for the core
 */
private void cacheExplicitShard(ShardSet shardSet, boolean overwrite) {
    String core = shardSet.getCore();
    // guard clause: nothing to do when already mapped and not overwriting
    if (!overwrite && this.coreExplicitIdCache.contains(core))
        return;
    QName propertyQName = QName.createQName(shardSet.getPrefixedProperty(), this.namespaceService);
    this.logger.debug("Mapping core to explicit ID: {} => {}", core, propertyQName);
    this.coreExplicitIdCache.put(core, propertyQName);
}
/**
 * Persists all registry caches to attributes, under a cluster-wide job lock.
 * Each persist step adds missing and updates changed entries; entries missing
 * from a cache are deliberately NOT removed (see notes in the individual methods).
 */
protected void persistCache() {
    // more retries (50 x 100ms) than the read path, since persisting must eventually succeed
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
    try {
        this.persistShardSetCache();
        this.persistNodeCache();
        this.persistShardNodesCache();
        this.persistShardInstanceCache();
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Persists every cached shard set under [ATTR_ASIE_SHARDSET, core].
 */
private void persistShardSetCache() {
    // add anything missing
    // update anything changed
    for (String core : this.shardsetsCache.getKeys()) {
        ShardSet shardSet = this.shardsetsCache.get(core);
        this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, core);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
}
/**
 * Persists the node cache: node objects under [ATTR_ASIE_NODE, ATTR_OBJECT, nodeSpec]
 * and disabled flags under [ATTR_ASIE_NODE, ATTR_DISABLE, nodeSpec]; also removes
 * persisted disabled flags for nodes that are no longer disabled.
 */
private void persistNodeCache() {
    // add anything missing
    // update anything changed
    for (String nodeSpec : this.nodesCache.getKeys()) {
        SolrHost node = this.nodesCache.get(nodeSpec);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, nodeSpec);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
    // add anything disabled
    for (SolrHost node : this.nodeDisableCache.values())
        this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, node.getSpec());
    // remove anything not disabled
    this.attrService.getAttributes(new AttributeQueryCallback() {
        @Override
        public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
            // keys = [ATTR_ASIE_NODE, ATTR_DISABLE, nodeSpec]
            SolrHost node = SolrHost.from((String) keys[2]);
            if (!nodeDisableCache.contains(node))
                attrService.removeAttribute(keys);
            return true; // continue iteration
        }
    }, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE);
}
/**
 * Persists every cached shard => node mapping under
 * [ATTR_ASIE_SHARD_NODES, shard, nodeSpec].
 */
private void persistShardNodesCache() {
    // add anything missing
    // update anything changed
    for (Shard shard : this.shardNodesCache.getKeys()) {
        Collection<SolrHost> nodes = this.shardNodesCache.get(shard);
        for (SolrHost node : nodes) {
            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
        }
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
}
/**
 * Persists shard instance states (via the state-aware checkSetAttribute overload)
 * and disabled flags under [ATTR_ASIE_SHARD_NODE, ATTR_DISABLE, shardNode]; also
 * removes persisted disabled flags for instances no longer disabled.
 */
private void persistShardInstanceCache() {
    // add anything missing
    // update anything changed
    for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
        ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
        this.checkSetAttribute(state, shardNode);
    }
    // we are not removing anything removed from the cache, as it might have expired
    // it will just recache on the next load
    // add anything disabled
    for (ShardInstance shardNode : this.shardInstanceDisableCache.values())
        this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode);
    // remove anything not disabled
    this.attrService.getAttributes(new AttributeQueryCallback() {
        @Override
        public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
            // keys = [ATTR_ASIE_SHARD_NODE, ATTR_DISABLE, shardNode]
            ShardInstance shardNode = (ShardInstance) keys[2];
            if (!shardInstanceDisableCache.contains(shardNode))
                attrService.removeAttribute(keys);
            return true; // continue iteration
        }
    }, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE);
}
/**
 * Persists a shard instance state under [ATTR_ASIE_SHARD_NODE, ATTR_OBJECT, shardNode],
 * overwriting only when the incoming state compares as newer than the persisted one.
 */
private void checkSetAttribute(ShardInstanceState state, ShardInstance shardNode) {
    ShardInstanceState currentState = (ShardInstanceState) this.attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
    if (currentState != null) {
        if (currentState.compareTo(state) >= 0) {
            // current state is older (greater; further down the list)
            // do nothing
            // NOTE(review): this wording conflicts with the else-branch log
            // ("persisted state was old; updating"), and registerShardState() uses
            // a strict > for the same comparison — confirm the intended direction
            // of ShardInstanceState.compareTo and align both call sites
        } else {
            this.logger.debug("The persisted state was old; updating: {}: {} => {}", shardNode, currentState, state);
            this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
        }
    } else {
        // nothing persisted yet; store the first state unconditionally
        this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
    }
}
/**
 * Sets the attribute at the given keys when missing or changed; warns when an
 * existing, different value is being overwritten.
 * NOTE(review): passing the keys array to a single {} placeholder logs its identity,
 * not its contents — consider Arrays.toString(keys) for readable output.
 */
private void checkSetAttribute(Serializable value, Serializable... keys) {
    Serializable currentValue = this.attrService.getAttribute(keys);
    if (currentValue != null) {
        if (currentValue.equals(value))
            return; // unchanged; avoid a redundant write
        this.logger.warn("The attribute value unexpectedly changed: {}: {} => {}", keys, currentValue, value);
    }
    this.attrService.setAttribute(value, keys);
}
/**
 * Registers (or refreshes) a shard state report: caches and persists the shard set,
 * the node, the shard => node mapping, and the instance state, all under the cluster
 * job lock. Optionally marks previously-unknown instances as disabled.
 */
@Override
public void registerShardState(ShardState shardNodeState) {
    // translate the Alfresco model into the ASIE model objects
    ShardSet shardSet = ShardSet.from(shardNodeState.getShardInstance().getShard().getFloc(), shardNodeState);
    Shard shard = Shard.from(shardSet, shardNodeState.getShardInstance().getShard().getInstance());
    SolrHost node = SolrHost.from(shardNodeState.getShardInstance());
    ShardInstance shardNode = ShardInstance.from(shard, node);
    ShardInstanceState state = ShardInstanceState.from(shardNodeState);
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        // cache + persist the shard set, keyed by core
        if (!this.shardsetsCache.contains(shardSet.getCore()))
            this.shardsetsCache.put(shardSet.getCore(), shardSet);
        this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, shardSet.getCore());
        // cache + persist the node, keyed by its spec
        if (!this.nodesCache.contains(node.getSpec()))
            this.nodesCache.put(node.getSpec(), node);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, node.getSpec());
        // cache + persist the shard => node mapping
        if (!this.shardNodesCache.contains(shard, node))
            this.shardNodesCache.add(shard, node);
        this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
        // cache the state only when it supersedes the cached one; persist via the
        // state-aware overload, which performs its own comparison
        ShardInstanceState currentState = this.shardInstanceStatesCache.get(shardNode);
        if (currentState == null || currentState.compareTo(state) > 0)
            this.shardInstanceStatesCache.put(shardNode, state);
        this.checkSetAttribute(state, shardNode);
        // optionally quarantine newly-registered instances until explicitly enabled
        if (this.registerDisabled && !this.shardInstanceDisableCache.contains(shardNode))
            this.shardInstanceDisableCache.add(shardNode);
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Removes a shard instance from the registry caches and deletes its persisted
 * shard => node mapping.
 */
@Override
public void unregisterShardInstance(org.alfresco.repo.index.shard.ShardInstance shardInstance) {
    ShardSet shardSet = ShardSet.from(shardInstance.getShard().getFloc(), null);
    Shard shard = Shard.from(shardSet, shardInstance.getShard().getInstance());
    SolrHost node = SolrHost.from(shardInstance);
    ShardInstance shardNode = ShardInstance.from(shard, node);
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
    try {
        this.shardInstanceStatesCache.remove(shardNode);
        this.shardInstanceDisableCache.remove(shardNode);
        this.shardInstanceUnavailableCache.remove(shardNode);
        // NOTE(review): clearing node-level flags affects every shard instance on
        // this node, not just the one being unregistered — confirm intent
        this.nodeDisableCache.remove(node);
        this.nodeUnavailableCache.remove(node);
        // NOTE(review): the shardNodesCache entry and the persisted
        // [ATTR_ASIE_SHARD_NODE, ATTR_OBJECT, shardNode] state are not removed here,
        // so the instance may resurface after the next loadPersistedToCache() — confirm
        this.attrService.removeAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Builds the Alfresco view of the registry: Floc => Shard => set of ShardStates,
 * excluding nodes and shard instances that are disabled or considered unavailable.
 *
 * @return the registered flocs; empty when nothing usable is registered
 */
@Override
public Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> getFlocs() {
    // core name => Floc, so all shards of the same core share one Floc instance
    Map<String, Floc> flocs = new HashMap<>();
    Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> response = new HashMap<>();
    for (Shard shard : this.shardNodesCache.getKeys()) {
        String core = shard.extractShardSetCore();
        ShardSet shardSet = this.shardsetsCache.get(core);
        if (shardSet == null) {
            // the shard set may have expired from the cache; nothing to report for this shard
            continue;
        }
        Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>> shards;
        Floc floc = flocs.get(core);
        if (floc == null) {
            // FIX: the null check was inverted (`!= null`) and the new Floc was never
            // memoized, so the first shard of each core fell into response.get(null)
            // and NPE'd on the subsequent shards.get() call
            floc = shardSet.toAlfrescoModel();
            flocs.put(core, floc);
            shards = new HashMap<>();
        } else {
            shards = response.get(floc);
            if (shards == null)
                shards = new HashMap<>();
        }
        org.alfresco.repo.index.shard.Shard shard_ = shard.toAlfrescoModel(floc);
        Set<ShardState> states = shards.get(shard_);
        if (states == null)
            states = new HashSet<>();
        for (SolrHost node : this.shardNodesCache.get(shard)) {
            if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node)) {
                this.logger.debug("Excluding node as it is disabled or considered unavailable: {}", node);
                continue;
            }
            ShardInstance shardNode = ShardInstance.from(shard, node);
            if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode)) {
                this.logger.debug("Excluding shard node as it is disabled or considered unavailable: {}", shardNode);
                continue;
            }
            ShardInstanceState shardNodeState = this.shardInstanceStatesCache.get(shardNode);
            if (shardNodeState == null) {
                // state may have been purged independently of the mapping; don't NPE
                continue;
            }
            states.add(shardNodeState.toAlfrescoModel(shardNode.toAlfrescoModel(shard_)));
        }
        if (!states.isEmpty())
            shards.put(shard_, states);
        if (!shards.isEmpty())
            response.put(floc, shards);
    }
    return response;
}
/**
 * Removes everything from the registry: all in-memory caches and all persisted
 * attributes, under the cluster job lock.
 */
@Override
public void purge() {
    // more retries (50 x 100ms) than the read path, since a purge must eventually succeed
    String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
    try {
        this.logger.info("Removing all nodes/shards from the shard registry");
        this.shardsetsCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARDSET);
        this.nodesCache.clear();
        this.nodeDisableCache.clear();
        this.nodeUnavailableCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_NODE);
        this.shardNodesCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODES);
        this.shardInstanceStatesCache.clear();
        this.shardInstanceDisableCache.clear();
        this.shardInstanceUnavailableCache.clear();
        this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODE);
        this.coreExplicitIdCache.clear();
    } finally {
        this.jobLockService.releaseLock(lockId, this.shardLock);
    }
}
/**
 * Ages out shard instances based on their last update time: instances idle past
 * the offline threshold are marked unavailable; instances offline past the forget
 * threshold are removed entirely. Explicitly disabled instances/nodes are skipped.
 */
@Override
public void purgeAgedOutShards() {
    OffsetDateTime onlineExpired = OffsetDateTime.now().minusSeconds(this.offlineIdleShardInSeconds);
    OffsetDateTime offlineExpired = OffsetDateTime.now().minusSeconds(this.forgetOfflineShardInSeconds);
    for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
        ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
        SolrHost node = shardNode.extractNode();
        if (this.shardInstanceDisableCache.contains(shardNode)) {
            this.logger.debug("Ignoring disabled shard instance during purgeAgedOutShards()");
        } else if (this.nodeDisableCache.contains(node)) {
            this.logger.debug("Ignoring disabled node during purgeAgedOutShards()");
        } else if (state.getLastUpdated().isBefore(offlineExpired)) {
            // offline long enough to be forgotten entirely
            this.shardInstanceStatesCache.remove(shardNode);
            if (this.shardInstanceUnavailableCache.remove(shardNode)) {
                this.logger.info("Forgetting about already offline shard: {}", shardNode);
            } else if (this.nodeUnavailableCache.remove(node)) {
                // NOTE(review): this removes the NODE-level unavailable flag while
                // forgetting a single instance, and logs the same message as the
                // branch above — confirm both are intended
                this.logger.info("Forgetting about already offline shard: {}", shardNode);
            } else {
                this.logger.warn("Forgetting about online shard: {}", shardNode);
            }
        } else if (state.getLastUpdated().isBefore(onlineExpired)) {
            // idle past the online threshold; take it out of rotation but remember it
            this.logger.warn("Taking shard offline: {}", shardNode);
            this.shardInstanceUnavailableCache.add(shardNode);
        }
    }
}
/**
 * @return the explicit shard ID property mapped to the given core; null when the
 *         core is unknown or not sharded by EXPLICIT_ID
 */
@Override
public QName getExplicitIdProperty(String coreName) {
    return this.coreExplicitIdCache.get(coreName);
}
/**
 * Lists the IDs of all shards registered against the given core.
 *
 * @param coreName the core of interest
 * @return the registered shard IDs; empty when the core is unknown
 */
@Override
public Set<Integer> getShardInstanceList(String coreName) {
    // guard clause: unknown core => no shards
    ShardSet shardSet = this.shardsetsCache.get(coreName);
    if (shardSet == null)
        return Collections.emptySet();
    Set<Integer> shardIds = new HashSet<>();
    String core = shardSet.getCore();
    for (Shard candidate : this.shardNodesCache.getKeys()) {
        if (core.equals(candidate.extractShardSetCore()))
            shardIds.add(candidate.extractShardId());
    }
    return shardIds;
}
/**
 * Not supported by this registry implementation.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public OptionalInt getShardInstanceByTransactionTimestamp(String coreId, long txnTimestamp) {
    throw new UnsupportedOperationException();
}
/**
 * Computes the smallest slice of shard instances able to answer the query, by
 * evaluating each known shard set and keeping the smallest usable slice. Stops
 * early once a single-shard slice is found.
 *
 * @return the best slice; an empty list for a null query; null when no shard set
 *         offered an optimization (NOTE(review): confirm callers accept null here)
 */
@Override
public List<org.alfresco.repo.index.shard.ShardInstance> getIndexSlice(SearchParameters searchParameters) {
    if (searchParameters.getQuery() == null)
        return Collections.emptyList();
    List<org.alfresco.repo.index.shard.ShardInstance> bestShards = null;
    for (String shardSetSpec : this.shardsetsCache.getKeys()) {
        ShardSet shardSet = this.shardsetsCache.get(shardSetSpec);
        Set<Integer> shardIds = this.getIndexSlice(searchParameters, shardSet);
        if (shardIds == null)
            continue; // this shard set cannot optimize the query
        List<org.alfresco.repo.index.shard.ShardInstance> shards = this.selectRandomNodes(shardSet, shardIds);
        // fewer shard instances == cheaper query
        if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
            bestShards = shards;
        // cannot do better than a single shard
        if (bestShards != null && bestShards.size() == 1)
            break;
    }
    return bestShards;
}
/**
 * Dispatches slice computation by sharding method. Only EXPLICIT_ID is currently
 * optimizable; all other methods return null (no optimization).
 *
 * @return the shard IDs to query, or null when no optimization is available
 */
protected Set<Integer> getIndexSlice(SearchParameters searchParameters, ShardSet shardSet) {
    try {
        switch (shardSet.getMethod()) {
            case EXPLICIT_ID:
                return this.getExplicitIdIndexSlice(searchParameters, shardSet);
            default:
                // no optimization available
                return null;
        }
    } catch (RecognitionException re) {
        // an unparseable query just means we fall back to querying all shards
        this.logger.debug("Failed to parse the query: " + searchParameters.getQuery(), re);
        // no optimization available
        return null;
    }
}
/**
 * Inspects the query for required constraints on the shard set's explicit shard ID
 * property and converts the constrained values/ranges into a set of shard IDs.
 *
 * @return the shard IDs the query is restricted to, or null when the query cannot
 *         be optimized (unsupported language, unknown property, or no constraint)
 * @throws RecognitionException if the query cannot be parsed
 */
protected Set<Integer> getExplicitIdIndexSlice(SearchParameters searchParameters, ShardSet shardSet) throws RecognitionException {
    this.logger.trace("Found {} shard set, which is the highest priority", shardSet.getMethod());
    QueryInspector inspector = this.queryInspectorFactory.selectQueryInspector(searchParameters);
    if (inspector == null) {
        this.logger.debug("The search is using an unsupported query language; unable to optimize for {}: {}", shardSet.getMethod(), searchParameters.getLanguage());
        return null;
    }
    String property = shardSet.getPrefixedProperty();
    QName propertyQName = QName.createQName(property, this.namespaceService);
    this.logger.trace("Will attempt to see if search has a required constraint on explicit shard ID property: {}", propertyQName);
    // guard: DictionaryService.getProperty() returns null for an undefined property,
    // which previously NPE'd here
    if (this.dictionaryService.getProperty(propertyQName) == null) {
        this.logger.debug("The explicit shard ID property is not in the data dictionary; unable to optimize: {}", propertyQName);
        return null;
    }
    DataTypeDefinition dtdef = this.dictionaryService.getProperty(propertyQName).getDataType();
    Set<Integer> shardIds = new HashSet<>();
    List<QueryValue> values = inspector.findRequiredPropertyValues(searchParameters.getQuery(), searchParameters.getDefaultOperator(), propertyQName, dtdef);
    this.logger.trace("Found {} matching terms query: {}: {}", values.size(), propertyQName, searchParameters.getQuery());
    for (QueryValue value : values) {
        if (value instanceof QuerySingleValue<?>) {
            @SuppressWarnings("unchecked")
            Number num = ((QuerySingleValue<? extends Number>) value).getValue();
            shardIds.add(num.intValue());
        } else if (value instanceof QueryRangeValue<?>) {
            @SuppressWarnings("unchecked")
            QueryRangeValue<? extends Number> num = (QueryRangeValue<? extends Number>) value;
            // exclusive bounds are tightened by one; assumes integral shard IDs
            int start = num.getStart().intValue();
            if (!num.isIncludeStart())
                start++;
            // FIX: was num.getStart(), which collapsed every range to at most one ID
            int end = num.getEnd().intValue();
            if (!num.isIncludeEnd())
                end--;
            for (int shardId = start; shardId <= end; shardId++)
                shardIds.add(shardId);
        }
    }
    if (shardIds.isEmpty()) {
        this.logger.trace("The {} shard set cannot not be used to optimize the query", shardSet.getMethod());
        return null;
    }
    this.logger.debug("The {} shard set was used to optimize the query to use only shards: {}", shardSet.getMethod(), shardIds);
    return shardIds;
}
/**
 * Picks one random available node for each requested shard, excluding disabled and
 * unavailable nodes/instances.
 *
 * @return one shard instance per shard ID; an empty list when any requested shard
 *         has no available node (the slice would be incomplete and thus unusable)
 */
protected List<org.alfresco.repo.index.shard.ShardInstance> selectRandomNodes(ShardSet shardSet, Collection<Integer> shardIds) {
    List<org.alfresco.repo.index.shard.ShardInstance> shardNodes = new LinkedList<>();
    for (Integer shardId : shardIds) {
        Shard shard = Shard.from(shardSet, shardId);
        Collection<SolrHost> nodes = this.shardNodesCache.get(shard);
        List<SolrHost> availableNodes = new LinkedList<>();
        if (nodes != null) { // FIX: the cache may have no entry for this shard at all
            for (SolrHost node : nodes) {
                if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node))
                    continue;
                ShardInstance shardNode = ShardInstance.from(shard, node);
                if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode))
                    continue;
                availableNodes.add(node);
            }
        }
        if (availableNodes.isEmpty()) {
            // FIX: Random.nextInt(0) throws IllegalArgumentException; if any required
            // shard has no available node, report "no usable slice" so the caller
            // (getIndexSlice) skips this shard set instead of crashing
            this.logger.debug("No available node for shard; the slice is unusable: {}", shard);
            return Collections.emptyList();
        }
        SolrHost randomNode = availableNodes.get(this.random.nextInt(availableNodes.size()));
        shardNodes.add(ShardInstance.from(shard, randomNode).toAlfrescoModel(shard.toAlfrescoModel(shardSet.toAlfrescoModel())));
    }
    return shardNodes;
}
}

View File

@@ -0,0 +1,79 @@
package com.inteligr8.alfresco.asie.util;
import java.util.Comparator;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import com.inteligr8.alfresco.asie.model.ShardSet;
/**
 * Orders shard sets by how cheaply they can satisfy a query: more selective
 * sharding methods first, then fewer shards first.
 */
public class ShardSetSearchComparator implements Comparator<ShardSet> {

    @Override
    public int compare(ShardSet ss1, ShardSet ss2) {
        int compare = this.compare(ss1.getMethod(), ss2.getMethod());
        if (compare != 0)
            return compare;
        // same method tier; prefer the set with fewer shards
        return this.compare(ss1.getShards(), ss2.getShards());
    }

    /**
     * Compares sharding methods by preference tier.
     *
     * FIX: the previous implementation returned -1 for two different methods of the
     * same tier in BOTH argument orders (e.g. PROPERTY vs DATE), violating the
     * Comparator contract (sgn(compare(a,b)) must equal -sgn(compare(b,a))); sorts
     * can then fail with "Comparison method violates its general contract!". Methods
     * in the same tier now compare equal, letting the shard-count comparison decide.
     */
    private int compare(ShardMethodEnum method1, ShardMethodEnum method2) {
        return Integer.compare(this.rank(method1), this.rank(method2));
    }

    /**
     * Maps a sharding method to its preference tier; lower is preferred. The tiers
     * mirror the original ordering: explicit-ID first, then property/date, then
     * ACL-based, then everything else.
     */
    private int rank(ShardMethodEnum method) {
        switch (method) {
            case EXPLICIT_ID:
            case EXPLICIT_ID_FALLBACK_LRIS:
                return 0;
            case PROPERTY:
            case DATE:
                return 1;
            case ACL_ID:
            case MOD_ACL_ID:
                return 2;
            default:
                return 3;
        }
    }

    /**
     * Compares shard counts; smaller counts are preferred.
     */
    private int compare(Short shards1, Short shards2) {
        // the larger the shard count, the more shards that may need to be queried
        // so prefer smaller shard counts
        // no shard count (DB_ID_RANGE) should be treated as the worst (unlimited)
        if (shards1 == null && shards2 == null) {
            return 0;
        } else if (shards1 == null) {
            return 1;
        } else if (shards2 == null) {
            return -1;
        } else {
            return shards1.compareTo(shards2);
        }
    }
}

View File

@@ -0,0 +1,109 @@
inteligr8.asie.registerUnknownShardDisabled=false
inteligr8.asie.offlineIdleShardInSeconds=120
inteligr8.asie.forgetOfflineShardInSeconds=86400
# we don't want items expiring out of the following caches
# an eviction policy of NONE disables the maxItems limits
# Overrides of alfresco-repository.jar/alfresco/caches.properties
cache.asieShardsetsSharedCache.tx.maxItems=65536
cache.asieShardsetsSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardsetsSharedCache.maxItems=65536
cache.asieShardsetsSharedCache.timeToLiveSeconds=0
cache.asieShardsetsSharedCache.maxIdleSeconds=0
cache.asieShardsetsSharedCache.cluster.type=fully-distributed
cache.asieShardsetsSharedCache.backup-count=1
cache.asieShardsetsSharedCache.eviction-policy=NONE
cache.asieShardsetsSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardsetsSharedCache.readBackupData=false
cache.asieNodesSharedCache.tx.maxItems=65536
cache.asieNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodesSharedCache.maxItems=65536
cache.asieNodesSharedCache.timeToLiveSeconds=0
cache.asieNodesSharedCache.maxIdleSeconds=0
cache.asieNodesSharedCache.cluster.type=fully-distributed
cache.asieNodesSharedCache.backup-count=1
cache.asieNodesSharedCache.eviction-policy=NONE
cache.asieNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodesSharedCache.readBackupData=false
cache.asieShardNodesSharedCache.tx.maxItems=65536
cache.asieShardNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardNodesSharedCache.maxItems=65536
cache.asieShardNodesSharedCache.timeToLiveSeconds=0
cache.asieShardNodesSharedCache.maxIdleSeconds=0
cache.asieShardNodesSharedCache.cluster.type=fully-distributed
cache.asieShardNodesSharedCache.backup-count=1
cache.asieShardNodesSharedCache.eviction-policy=NONE
cache.asieShardNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardNodesSharedCache.readBackupData=false
cache.asieShardInstanceStateSharedCache.tx.maxItems=65536
cache.asieShardInstanceStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceStateSharedCache.maxItems=65536
cache.asieShardInstanceStateSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceStateSharedCache.maxIdleSeconds=0
cache.asieShardInstanceStateSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceStateSharedCache.backup-count=1
cache.asieShardInstanceStateSharedCache.eviction-policy=NONE
cache.asieShardInstanceStateSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceStateSharedCache.readBackupData=false
cache.asieNodeDisabledSharedCache.tx.maxItems=65536
cache.asieNodeDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodeDisabledSharedCache.maxItems=65536
cache.asieNodeDisabledSharedCache.timeToLiveSeconds=0
cache.asieNodeDisabledSharedCache.maxIdleSeconds=0
cache.asieNodeDisabledSharedCache.cluster.type=fully-distributed
cache.asieNodeDisabledSharedCache.backup-count=1
cache.asieNodeDisabledSharedCache.eviction-policy=NONE
cache.asieNodeDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodeDisabledSharedCache.readBackupData=false
cache.asieNodeUnavailableSharedCache.tx.maxItems=65536
cache.asieNodeUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieNodeUnavailableSharedCache.maxItems=65536
cache.asieNodeUnavailableSharedCache.timeToLiveSeconds=0
cache.asieNodeUnavailableSharedCache.maxIdleSeconds=0
cache.asieNodeUnavailableSharedCache.cluster.type=fully-distributed
cache.asieNodeUnavailableSharedCache.backup-count=1
cache.asieNodeUnavailableSharedCache.eviction-policy=NONE
cache.asieNodeUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieNodeUnavailableSharedCache.readBackupData=false
cache.asieShardInstanceDisabledSharedCache.tx.maxItems=65536
cache.asieShardInstanceDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceDisabledSharedCache.maxItems=65536
cache.asieShardInstanceDisabledSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceDisabledSharedCache.maxIdleSeconds=0
cache.asieShardInstanceDisabledSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceDisabledSharedCache.backup-count=1
cache.asieShardInstanceDisabledSharedCache.eviction-policy=NONE
cache.asieShardInstanceDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceDisabledSharedCache.readBackupData=false
cache.asieShardInstanceUnavailableSharedCache.tx.maxItems=65536
cache.asieShardInstanceUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieShardInstanceUnavailableSharedCache.maxItems=65536
cache.asieShardInstanceUnavailableSharedCache.timeToLiveSeconds=0
cache.asieShardInstanceUnavailableSharedCache.maxIdleSeconds=0
cache.asieShardInstanceUnavailableSharedCache.cluster.type=fully-distributed
cache.asieShardInstanceUnavailableSharedCache.backup-count=1
cache.asieShardInstanceUnavailableSharedCache.eviction-policy=NONE
cache.asieShardInstanceUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieShardInstanceUnavailableSharedCache.readBackupData=false
cache.asieCoreExplicitSharedCache.tx.maxItems=65536
cache.asieCoreExplicitSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.asieCoreExplicitSharedCache.maxItems=65536
cache.asieCoreExplicitSharedCache.timeToLiveSeconds=0
cache.asieCoreExplicitSharedCache.maxIdleSeconds=0
cache.asieCoreExplicitSharedCache.cluster.type=fully-distributed
cache.asieCoreExplicitSharedCache.backup-count=1
cache.asieCoreExplicitSharedCache.eviction-policy=NONE
cache.asieCoreExplicitSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
cache.asieCoreExplicitSharedCache.readBackupData=false

View File

@@ -0,0 +1,50 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Use this file for beans to be loaded in whatever order Alfresco/Spring decides -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
<bean name="asieShardsetsCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieShardsetsSharedCache" />
</bean>
<bean name="asieNodesCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieNodesSharedCache" />
</bean>
<bean name="asieShardNodesCache" factory-bean="cacheFactory" factory-method="createMultiValueCache">
<constructor-arg value="cache.asieShardNodesSharedCache" />
</bean>
<bean name="asieShardInstanceStateCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieShardInstanceStateSharedCache" />
</bean>
<bean name="asieNodeDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieNodeDisabledSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieNodeUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieNodeUnavailableSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieShardInstanceDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieShardInstanceDisabledSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieShardInstanceUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
<constructor-arg value="cache.asieShardInstanceUnavailableSharedCache" />
<constructor-arg value="java.util.HashSet" />
</bean>
<bean name="asieCoreExplicitCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.asieCoreExplicitSharedCache" />
</bean>
</beans>

View File

@@ -0,0 +1,11 @@
module.id=com_inteligr8_alfresco_${project.artifactId}
module.aliases=
module.title=${project.name}
module.description=${project.description}
module.version=${module.version}
module.repo.version.min=23.0
# NOTE(review): wildcard versions are used below because pinning explicit dependency
# versions caused resolution problems — presumably due to the modules' non-standard
# versioning scheme (e.g. 1.3.1-acs-v23.3); confirm before tightening these.
module.depends.com.inteligr8.alfresco.cachext-platform-module=*
module.depends.com.inteligr8.alfresco.cxf-jaxrs-platform-module=*

View File

@@ -0,0 +1,146 @@
package com.inteligr8.alfresco.asie;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
public class QueryConstraintUnitTest {

    private static final ObjectMapper om = new ObjectMapper();

    @BeforeClass
    public static void init() {
        // Allow ANTLR parse trees to be dumped as JSON (useful when debugging failed assertions).
        SimpleModule treeModule = new SimpleModule();
        treeModule.addSerializer(Tree.class, new TreeSerializer());
        om.registerModule(treeModule);
    }

    @Test
    public void testSingleExactTerm() throws RecognitionException, JsonProcessingException {
        // The '=' prefix produces an EXACT_TERM node instead of a (fuzzy) TERM node.
        Tree cursor = this.parseFts("=@cm:title:test", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        cursor = this.validateChildren(cursor, "CONJUNCTION");
        cursor = this.validateChildren(cursor, "DEFAULT");
        cursor = this.validateChildren(cursor, "EXACT_TERM", "test");
        cursor = this.validateChildren(cursor, "FIELD_REF", "title");
        this.validate(cursor, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyTerm() throws RecognitionException, JsonProcessingException {
        // No '=' prefix, so the field match parses as a plain TERM.
        Tree cursor = this.parseFts("@cm:title:test", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        cursor = this.validateChildren(cursor, "CONJUNCTION");
        cursor = this.validateChildren(cursor, "DEFAULT");
        cursor = this.validateChildren(cursor, "TERM", "test");
        cursor = this.validateChildren(cursor, "FIELD_REF", "title");
        this.validate(cursor, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyString() throws RecognitionException, JsonProcessingException {
        // A single-quoted value parses as a PHRASE; the quotes are kept in the node text.
        Tree cursor = this.parseFts("@cm:title:'testing'", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        cursor = this.validateChildren(cursor, "CONJUNCTION");
        cursor = this.validateChildren(cursor, "DEFAULT");
        cursor = this.validateChildren(cursor, "PHRASE", "'testing'");
        cursor = this.validateChildren(cursor, "FIELD_REF", "title");
        this.validate(cursor, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyStringDoubleQuotes() throws RecognitionException, JsonProcessingException {
        // Double quotes behave like single quotes: a PHRASE node retaining the quotes.
        Tree cursor = this.parseFts("cm:title:\"testing\"", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        cursor = this.validateChildren(cursor, "CONJUNCTION");
        cursor = this.validateChildren(cursor, "DEFAULT");
        cursor = this.validateChildren(cursor, "PHRASE", "\"testing\"");
        cursor = this.validateChildren(cursor, "FIELD_REF", "title");
        this.validate(cursor, "PREFIX", "cm");
    }

    @Test
    public void testSingleRange() throws RecognitionException, JsonProcessingException {
        // '[' means inclusive lower bound; '>' means exclusive upper bound.
        Tree cursor = this.parseFts("@cm:created:[NOW TO '2025-01-01T00:00:00'>", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        cursor = this.validateChildren(cursor, "CONJUNCTION");
        cursor = this.validateChildren(cursor, "DEFAULT");
        cursor = this.validateChildren(cursor, "RANGE", "INCLUSIVE", "NOW", "'2025-01-01T00:00:00'", "EXCLUSIVE");
        cursor = this.validateChildren(cursor, "FIELD_REF", "created");
        this.validate(cursor, "PREFIX", "cm");
    }

    @Test
    public void testTwoTerms() throws RecognitionException, JsonProcessingException {
        // An explicit AND produces one CONJUNCTION with two DEFAULT branches.
        Tree cursor = this.parseFts("=@cm:title:test1 AND @cm:author:test2", Operator.AND);
        cursor = this.validateChildren(cursor, "DISJUNCTION");
        List<Tree> branches = this.validateChildren(cursor, "CONJUNCTION", 2);

        // First branch: exact term on cm:title.
        cursor = this.validateChildren(branches.get(0), "DEFAULT");
        cursor = this.validateChildren(cursor, "EXACT_TERM", "test1");
        cursor = this.validateChildren(cursor, "FIELD_REF", "title");
        this.validate(cursor, "PREFIX", "cm");

        // Second branch: fuzzy term on cm:author.
        cursor = this.validateChildren(branches.get(1), "DEFAULT");
        cursor = this.validateChildren(cursor, "TERM", "test2");
        cursor = this.validateChildren(cursor, "FIELD_REF", "author");
        this.validate(cursor, "PREFIX", "cm");
    }

    /**
     * Asserts the node has the given text and exactly the given children (leaf check).
     */
    protected void validate(Tree node, String text, String... extraValues) {
        Assert.assertNotNull(node);
        Assert.assertEquals(text, node.getText());
        Assert.assertEquals(extraValues.length, node.getChildCount());
        int i = 0;
        for (String expected : extraValues) {
            Assert.assertEquals(expected, node.getChild(i).getText());
            i++;
        }
    }

    /**
     * Asserts the node has the given text and the given leading children, then
     * descends by returning the one extra trailing child.
     */
    protected Tree validateChildren(Tree node, String text, String... extraValues) {
        Assert.assertNotNull(node);
        Assert.assertEquals(text, node.getText());
        Assert.assertEquals(extraValues.length + 1, node.getChildCount());
        for (int i = 0; i < extraValues.length; i++)
            Assert.assertEquals(extraValues[i], node.getChild(i).getText());
        return node.getChild(extraValues.length);
    }

    /**
     * Asserts the node has the given text and exactly {@code count} children,
     * returning all of them for branch-by-branch inspection.
     */
    protected List<Tree> validateChildren(Tree node, String text, int count) {
        Assert.assertNotNull(node);
        Assert.assertEquals(text, node.getText());
        Assert.assertEquals(count, node.getChildCount());
        List<Tree> children = new ArrayList<>(count);
        for (int i = 0; i < count; i++)
            children.add(node.getChild(i));
        return children;
    }

    /**
     * Parses an Alfresco FTS query into an ANTLR syntax tree using the given
     * default operator for unconnected clauses.
     */
    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException, JsonProcessingException {
        boolean conjoinByDefault = defaultOperator.equals(Operator.AND);
        FTSLexer lexer = new FTSLexer(new ANTLRStringStream(ftsQuery));
        FTSParser parser = new FTSParser(new CommonTokenStream(lexer));
        parser.setDefaultFieldConjunction(conjoinByDefault);
        parser.setMode(conjoinByDefault ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        return (CommonTree) parser.ftsQuery().getTree();
    }
}

View File

@@ -0,0 +1,44 @@
package com.inteligr8.alfresco.asie;
import java.io.IOException;
import org.antlr.runtime.tree.Tree;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
/**
 * Jackson serializer that renders an ANTLR {@link Tree} as a JSON object:
 * an optional "text" field plus a recursive "children" array (omitted when
 * the node is a leaf).
 */
public class TreeSerializer extends StdSerializer<Tree> {

    private static final long serialVersionUID = -2714782538361726878L;

    public TreeSerializer() {
        super(Tree.class);
    }

    public TreeSerializer(Class<Tree> type) {
        super(type);
    }

    public TreeSerializer(JavaType type) {
        super(type);
    }

    @Override
    public void serialize(Tree value, JsonGenerator gen, SerializerProvider provider) throws IOException {
        gen.writeStartObject();

        String text = value.getText();
        if (text != null)
            gen.writeStringField("text", text);

        int childCount = value.getChildCount();
        if (childCount > 0) {
            gen.writeArrayFieldStart("children");
            // Children serialize recursively through this same serializer.
            int i = 0;
            while (i < childCount) {
                gen.writeObject(value.getChild(i));
                i++;
            }
            gen.writeEndArray();
        }

        gen.writeEndObject();
    }
}

Binary file not shown.

View File

@@ -16,8 +16,12 @@
<name>ASIE Platform Module for ACS Enterprise</name>
<properties>
<alfresco.sdk.version>5.2.0</alfresco.sdk.version>
<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
<alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
<tomcat-rad.version>10-2.1</tomcat-rad.version>
<beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
</properties>
<dependencyManagement>
@@ -80,7 +84,7 @@
<groupId>com.inteligr8.alfresco</groupId>
<artifactId>cxf-jaxrs-platform-module</artifactId>
<version>1.3.1-acs-v23.3</version>
<scope>provided</scope>
<type>amp</type>
</dependency>
<!-- Including for testing purposes only -->

View File

@@ -1,22 +1,22 @@
#!/bin/sh
discoverArtifactId() {
ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate`
ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
}
rebuild() {
echo "Rebuilding project ..."
mvn process-classes
mvn process-test-classes
}
start() {
echo "Rebuilding project and starting Docker containers to support rapid application development ..."
mvn -Drad process-classes
mvn -Drad process-test-classes
}
start_log() {
echo "Rebuilding project and starting Docker containers to support rapid application development ..."
mvn -Drad -Ddocker.showLogs process-classes
mvn -Drad -Ddocker.showLogs process-test-classes
}
stop() {

View File

@@ -72,6 +72,7 @@
<module>asie-api</module>
<module>shared</module>
<module>enterprise-module</module>
<module>community-module</module>
</modules>
<profiles>

View File

@@ -16,7 +16,7 @@
<name>ASIE Shared Library for Platform Modules</name>
<properties>
<alfresco.sdk.version>5.2.0</alfresco.sdk.version>
<alfresco.sdk.version>4.9.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
</properties>

View File

@@ -0,0 +1,36 @@
package com.inteligr8.alfresco.asie.model;
import java.io.Serializable;
/**
 * A serializable, time-limited reference to a Solr node. The reference is
 * considered valid for a fixed number of minutes from construction (or the
 * last {@link #reset()}), after which {@link #isExpired()} returns true.
 */
public class PersistedNode implements Serializable {

    private static final long serialVersionUID = 4105196543023419818L;

    // The node being referenced; stored as-is, never null-checked here.
    private final SolrHost node;
    // Lifetime of the reference in milliseconds, fixed at construction.
    private final long persistMillis;
    // Absolute epoch time (ms) at which this reference expires.
    private long expireTimeMillis;

    /**
     * @param node The node to persist a reference to.
     * @param persistMinutes How long the reference stays valid, in minutes; the clock starts now.
     */
    public PersistedNode(SolrHost node, int persistMinutes) {
        this.node = node;
        this.persistMillis = persistMinutes * 60L * 1000L;
        this.reset();
    }

    /**
     * Restarts the expiration clock from the current time.
     */
    public void reset() {
        this.expireTimeMillis = System.currentTimeMillis() + this.persistMillis;
    }

    /**
     * @return true once the configured lifetime has elapsed since the last reset.
     */
    public boolean isExpired() {
        return this.expireTimeMillis < System.currentTimeMillis();
    }

    public SolrHost getNode() {
        return this.node;
    }

    @Override
    public String toString() {
        // Time remaining until expiration; negative once expired.
        // Fix: operands were previously reversed (now - expire), which reported
        // the negation of the remaining time.
        return "node: " + this.node + "; expires in: " + (this.expireTimeMillis - System.currentTimeMillis()) + " ms";
    }
}

View File

@@ -1,13 +1,10 @@
package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
@@ -23,7 +20,7 @@ import com.inteligr8.rs.ClientCxfImpl;
import jakarta.ws.rs.client.ClientRequestContext;
public abstract class AbstractAsieWebScript extends AbstractWebScript implements InitializingBean {
public abstract class AbstractAsieWebScript extends AbstractWebScript {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@@ -42,9 +39,6 @@ public abstract class AbstractAsieWebScript extends AbstractWebScript implements
@Value("${solr.sharedSecret}")
private String solrSharedSecret;
@Value("${inteligr8.asie.allowedAuthorities}")
private String authorizedAuthoritiesStr;
@Value("${inteligr8.asie.basePath}")
private String solrBaseUrl;
@@ -52,29 +46,12 @@ public abstract class AbstractAsieWebScript extends AbstractWebScript implements
@Qualifier(Constants.QUALIFIER_ASIE)
private ObjectMapper objectMapper;
private Set<String> authorizedAuthorities;
@Override
public void afterPropertiesSet() throws Exception {
this.authorizedAuthorities = new HashSet<>();
String[] authorities = this.authorizedAuthoritiesStr.split(",");
for (String authority : authorities) {
authority = StringUtils.trimToNull(authority);
if (authority != null)
this.authorizedAuthorities.add(authority);
}
if (this.authorizedAuthorities.isEmpty())
this.logger.warn("All authenticated users will be authorized to access ASIE web scripts");
super.afterPropertiesSet();
this.solrSharedSecret = StringUtils.trimToNull(this.solrSharedSecret);
}
@Override
protected Set<String> getAuthorities() {
return this.authorizedAuthorities;
}
protected ObjectMapper getObjectMapper() {
return this.objectMapper;
}

View File

@@ -4,11 +4,19 @@ import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.security.AuthorityService;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.extensions.webscripts.Description.RequiredAuthentication;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
@@ -16,9 +24,38 @@ import org.springframework.http.HttpStatus;
import net.sf.acegisecurity.GrantedAuthority;
public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript {
public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript implements InitializingBean {
protected abstract Set<String> getAuthorities();
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Value("${inteligr8.asie.allowedAuthorities}")
private String authorizedAuthoritiesStr;
@Autowired
private AuthorityService authorityService;
private Set<String> authorizedAuthorities;
@Override
public void afterPropertiesSet() throws Exception {
this.authorizedAuthorities = new HashSet<>();
String[] authorities = this.authorizedAuthoritiesStr.split(",");
for (String authority : authorities) {
authority = StringUtils.trimToNull(authority);
if (authority != null)
this.authorizedAuthorities.add(authority);
}
if (this.authorizedAuthorities.isEmpty()) {
this.logger.warn("All authenticated users will be authorized to access web scripts");
} else {
this.logger.debug("Allowing only authorities: {}", this.authorizedAuthorities);
}
}
protected Set<String> getAuthorities() {
return this.authorizedAuthorities;
}
@Override
public final void execute(WebScriptRequest request, WebScriptResponse response) throws IOException {
@@ -38,6 +75,13 @@ public abstract class AbstractWebScript extends org.springframework.extensions.w
return true;
}
Set<String> authorities = this.authorityService.getAuthoritiesForUser(AuthenticationUtil.getFullyAuthenticatedUser());
if (authorities != null) {
if (!Collections.disjoint(this.getAuthorities(), authorities))
return true;
}
this.logger.trace("Not authorized: user '{}'; authorities: {} + {}", AuthenticationUtil.getFullyAuthenticatedUser(), AuthenticationUtil.getFullAuthentication().getAuthorities(), authorities);
return false;
}

View File

@@ -3,7 +3,6 @@ package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
@@ -21,12 +20,12 @@ public class ClearRegistryWebScript extends AbstractWebScript {
@Autowired
private ShardStateService sss;
@Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException {
@Override
public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
this.sss.clear();
this.sbs.forget();
res.setStatus(HttpStatus.OK.value());
response.setStatus(HttpStatus.OK.value());
}
}

View File

@@ -1,7 +1,5 @@
package com.inteligr8.alfresco.asie.service;
import java.io.Serializable;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -11,6 +9,7 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.model.PersistedNode;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
@@ -31,10 +30,10 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
String shardKey = shardSet.getCore() + "-" + shardId;
PersistedNode backupNode = (PersistedNode) this.attributeService.getAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
this.logger.debug("Found backup node: {}", backupNode);
logger.debug("Found backup node: {}", backupNode);
if (backupNode == null || backupNode.isExpired()) {
backupNode = new PersistedNode(node);
backupNode = new PersistedNode(node, this.persistTimeMinutes);
this.attributeService.setAttribute(backupNode, Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
}
@@ -50,37 +49,4 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
this.attributeService.removeAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
}
private class PersistedNode implements Serializable {
private static final long serialVersionUID = 4105196543023419818L;
private final SolrHost node;
private long expireTimeMillis;
PersistedNode(SolrHost node) {
this.node = node;
this.reset();
}
void reset() {
this.expireTimeMillis = System.currentTimeMillis() + persistTimeMinutes * 60L * 1000L;
}
boolean isExpired() {
return this.expireTimeMillis < System.currentTimeMillis();
}
SolrHost getNode() {
return this.node;
}
@Override
public String toString() {
return "node: " + this.node + "; expires in: " + (System.currentTimeMillis() - this.expireTimeMillis) + " ms";
}
}
}

View File

@@ -8,10 +8,8 @@
<p>Retrieve a reference to the ASIE node that should be used for the backup of the specified ASIE shard registered with ACS.</p>
<p>The following path parameters are expected:</p>
<dl>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
</dl>
@@ -34,12 +32,15 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/shard/{shardSet}/{shardId}/backup</url>
<url>/inteligr8/asie/shard/{shardCore}/{shardId}/backup</url>
<format default="json">any</format>
<!-- Security -->
<authentication>none</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>
<never>false</never>

View File

@@ -8,10 +8,8 @@
<p>Retrieve a reference to the most current/up-to-date ASIE node for the specified ASIE shard registered with ACS.</p>
<p>The following path parameters are expected:</p>
<dl>
<dt>shardSet</dt>
<dd>A shard method combined with its distinguishing properties;
methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
<dt>shardCore</dt>
<dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
<dt>shardId</dt>
<dd>A number starting at 1</dd>
</dl>
@@ -30,12 +28,15 @@
]]></description>
<!-- Endpoint Configuration -->
<url>/inteligr8/asie/shard/{shardSet}/{shardId}/lead</url>
<url>/inteligr8/asie/shard/{shardCore}/{shardId}/lead</url>
<format default="json">any</format>
<!-- Security -->
<authentication>none</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>
<never>false</never>

View File

@@ -29,7 +29,10 @@
<url>/inteligr8/asie/node/{nodeEndpoint}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -58,7 +58,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -31,7 +31,7 @@
<url>/inteligr8/asie/node/{nodeEndpoint}?coreName={coreName?}&amp;shardRange={shardRange?}&amp;template={template?}&amp;shardCount={shardCount?}&amp;nodeId={nodeId?}&amp;nodeCount={nodeCount?}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -32,7 +32,10 @@
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -30,7 +30,7 @@
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -30,7 +30,10 @@
<url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -54,7 +54,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -61,7 +61,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -21,7 +21,10 @@
<url>/inteligr8/asie/nodes</url>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Transaction -->
<transaction>required</transaction>
<!-- Functionality -->
<cache>

View File

@@ -47,7 +47,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -60,7 +60,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -57,7 +57,7 @@
<format default="json">any</format>
<!-- Security -->
<authentication>admin</authentication>
<authentication>user</authentication>
<!-- Functionality -->
<cache>

View File

@@ -2,7 +2,7 @@
# defaulting to 3 days = 60 * 24 * 3 = 4320
inteligr8.asie.backup.persistTimeMinutes=4320
inteligr8.asie.allowedAuthorities=ALFRESCO_ADMINISTRATORS
inteligr8.asie.allowedAuthorities=GROUP_ALFRESCO_ADMINISTRATORS
# same as solr.baseUrl, but that property is private to the Search subsystem
inteligr8.asie.basePath=/solr