compiling/running module (untested)
BIN  community-module/metadata.keystore (new file; binary content not shown)
@@ -6,7 +6,7 @@
     <parent>
         <groupId>com.inteligr8.alfresco</groupId>
         <artifactId>asie-platform-module-parent</artifactId>
-        <version>1.1-SNAPSHOT</version>
+        <version>1.2-SNAPSHOT</version>
         <relativePath>../</relativePath>
     </parent>

@@ -16,8 +16,12 @@
     <name>ASIE Platform Module for ACS Community</name>

     <properties>
-        <alfresco.sdk.version>5.2.0</alfresco.sdk.version>
-        <alfresco.platform.version>7.0.0</alfresco.platform.version>
+        <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
+        <alfresco.platform.version>23.3.0</alfresco.platform.version>
+        <alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
+        <tomcat-rad.version>10-2.1</tomcat-rad.version>
+
+        <beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
     </properties>

     <dependencyManagement>
@@ -42,7 +46,6 @@
             <groupId>com.inteligr8.alfresco</groupId>
             <artifactId>asie-shared</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>

         <!-- Needed by this module, but provided by ACS -->
@@ -52,6 +55,14 @@
             <scope>provided</scope>
         </dependency>

+        <!-- Alfresco Modules required to use this module -->
+        <dependency>
+            <groupId>com.inteligr8.alfresco</groupId>
+            <artifactId>cxf-jaxrs-platform-module</artifactId>
+            <version>1.3.1-acs-v23.3</version>
+            <type>amp</type>
+        </dependency>
+
         <!-- Including for testing purposes only -->
         <dependency>
             <groupId>junit</groupId>
@@ -74,6 +85,10 @@
                 <extensions>true</extensions>
                 <configuration>
                     <tiles>
+                        <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-search-rad-tile -->
+                        <tile>com.inteligr8.ootbee:beedk-acs-search-rad-tile:[1.1.6,2.0.0)</tile>
+                        <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-self-rad-tile -->
+                        <tile>com.inteligr8.ootbee:beedk-acs-platform-self-rad-tile:[1.1.6,2.0.0)</tile>
                         <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-module-tile -->
                         <tile>com.inteligr8.ootbee:beedk-acs-platform-module-tile:[1.1.6,2.0.0)</tile>
                     </tiles>
@@ -1,22 +1,22 @@
 #!/bin/sh

 discoverArtifactId() {
-    ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate`
+    ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
 }

 rebuild() {
     echo "Rebuilding project ..."
-    mvn process-classes
+    mvn process-test-classes
 }

 start() {
     echo "Rebuilding project and starting Docker containers to support rapid application development ..."
-    mvn -Drad process-classes
+    mvn -Drad process-test-classes
 }

 start_log() {
     echo "Rebuilding project and starting Docker containers to support rapid application development ..."
-    mvn -Drad -Ddocker.showLogs process-classes
+    mvn -Drad -Ddocker.showLogs process-test-classes
 }

 stop() {
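A brief aside on the sed filter added to discoverArtifactId() above: it strips ANSI escape sequences (terminal color codes) that mvn can emit, so ARTIFACT_ID ends up holding only the bare artifact id. A minimal, self-contained sketch of the same filtering, assuming GNU sed; the colored string below is a made-up stand-in for colorized `mvn help:evaluate` output:

#!/bin/sh
# Stand-in for colorized Maven output; not real mvn output.
colored=`printf '\033[1;34mcommunity-module\033[0m'`
# Same ANSI-stripping expression used in discoverArtifactId() above.
printf '%s\n' "$colored" | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'
# prints: community-module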
@@ -1,10 +1,12 @@
 package com.inteligr8.alfresco.asie.compute;

+import java.util.List;
 import java.util.Set;

 import org.alfresco.repo.search.impl.parsers.CMISLexer;
 import org.alfresco.repo.search.impl.parsers.CMISParser;
 import org.alfresco.service.cmr.search.SearchParameters.Operator;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
 import org.alfresco.service.cmr.search.SearchService;
 import org.alfresco.service.namespace.QName;
 import org.antlr.runtime.ANTLRStringStream;
@@ -31,8 +33,9 @@ public class CmisQueryInspector implements QueryInspector {
     }

     @Override
-    public QueryValue findRequiredProperty(String cmisQuery, Operator defaultOperator, QName property) throws RecognitionException {
-        Tree tree = this.parseCmis(cmisQuery, defaultOperator);
+    public List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
+        Tree tree = this.parseCmis(query, defaultOperator);
+        throw new UnsupportedOperationException();
     }

     protected Tree parseCmis(String cmisQuery, Operator defaultOperator) throws RecognitionException {
@@ -1,5 +1,6 @@
 package com.inteligr8.alfresco.asie.compute;

+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -14,7 +15,7 @@ public class QueryInspectorFactory implements InitializingBean {
     @Autowired
     private List<QueryInspector> inspectors;

-    private Map<String, QueryInspector> languageInspectorMap;
+    private Map<String, QueryInspector> languageInspectorMap = new HashMap<>();

     @Override
     public void afterPropertiesSet() throws Exception {
@@ -0,0 +1,240 @@
+package com.inteligr8.alfresco.asie.service;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.alfresco.repo.cache.SimpleCache;
+import org.alfresco.repo.index.shard.ShardMethodEnum;
+import org.alfresco.service.namespace.QName;
+import org.alfresco.util.Pair;
+import org.alfresco.util.collections.CollectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.inteligr8.alfresco.asie.CommunityConstants;
+import com.inteligr8.alfresco.asie.model.Shard;
+import com.inteligr8.alfresco.asie.model.ShardInstance;
+import com.inteligr8.alfresco.asie.model.ShardInstanceState;
+import com.inteligr8.alfresco.asie.model.ShardSet;
+import com.inteligr8.alfresco.asie.model.SolrHost;
+import com.inteligr8.alfresco.cachext.CollectionCache;
+import com.inteligr8.alfresco.cachext.MultiValueCache;
+
+@Component
+public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {
+
+    private final Logger logger = LoggerFactory.getLogger(this.getClass());
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
+    private SimpleCache<String, ShardSet> shardsetsCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_NODES_CACHE)
+    private SimpleCache<String, SolrHost> nodesCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
+    private MultiValueCache<Shard, SolrHost> shardNodesCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
+    private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
+    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
+    private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
+    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
+    private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;
+
+    @Autowired
+    @Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
+    private SimpleCache<String, QName> coreExplicitIdCache;
+
+    @Override
+    public ShardSet findSetByCore(String core) {
+        return this.shardsetsCache.get(core);
+    }
+
+    @Override
+    public SolrHost findNode(String nodeHostname, int nodePort) {
+        Map<String, InetAddress> resolvedAddresses = new HashMap<>();
+
+        for (String nodeSpec : this.nodesCache.getKeys()) {
+            SolrHost node = this.nodesCache.get(nodeSpec);
+
+            if (!nodeHostname.equalsIgnoreCase(node.getHostname())) {
+                if (!resolvedAddresses.containsKey(nodeHostname))
+                    resolvedAddresses.put(nodeHostname, this.resolve(nodeHostname));
+                InetAddress nodeAddress = resolvedAddresses.get(nodeHostname);
+                this.logger.trace("Resolved: {} => {}", nodeHostname, nodeAddress);
+                if (nodeAddress == null)
+                    continue;
+
+                if (!resolvedAddresses.containsKey(node.getHostname()))
+                    resolvedAddresses.put(node.getHostname(), this.resolve(node.getHostname()));
+                InetAddress shardInstanceAddress = resolvedAddresses.get(node.getHostname());
+                this.logger.trace("Resolved: {} => {}", node.getHostname(), shardInstanceAddress);
+                if (!nodeAddress.equals(shardInstanceAddress))
+                    continue;
+            }
+
+            if (nodePort == node.getPort()) {
+                this.logger.debug("Found node: {}", node);
+                return node;
+            }
+        }
+
+        return null;
+    }
+
+    private InetAddress resolve(String hostname) {
+        try {
+            return InetAddress.getByName(hostname);
+        } catch (UnknownHostException uhe) {
+            return null;
+        }
+    }
+
+    @Override
+    public Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node) {
+        Map<ShardSet, Map<Integer, ShardInstanceState>> response = new HashMap<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            ShardSet shardSet = this.shardsetsCache.get(shard.extractShardSetCore());
+
+            if (this.shardNodesCache.contains(shard, node)) {
+                ShardInstance shardNode = ShardInstance.from(shard, node);
+                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
+
+                Map<Integer, ShardInstanceState> shards = response.get(shardSet);
+                if (shards == null)
+                    response.put(shardSet, shards = new HashMap<>());
+                shards.put(shard.extractShardId(), state);
+            }
+        }
+
+        return response;
+    }
+
+    @Override
+    public Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods) {
+        Set<ShardSet> shardSets = new HashSet<>();
+
+        Set<ShardMethodEnum> methods = CollectionUtils.asSet(shardMethods);
+        for (String core : this.shardsetsCache.getKeys()) {
+            ShardSet shardSet = this.shardsetsCache.get(core);
+            if (methods.contains(shardSet.getMethod()))
+                shardSets.add(shardSet);
+        }
+
+        return shardSets;
+    }
+
+    @Override
+    public Set<SolrHost> findNodes(ShardSet shardSet) {
+        Set<SolrHost> nodes = new HashSet<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            if (shardSet.getCore().equals(shard.extractShardSetCore()))
+                nodes.addAll(this.shardNodesCache.get(shard));
+        }
+
+        return nodes;
+    }
+
+    @Override
+    public Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId) {
+        Set<SolrHost> nodes = new HashSet<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            if (shardSet.getCore().equals(shard.extractShardSetCore()) && shardId == shard.extractShardId())
+                nodes.addAll(this.shardNodesCache.get(shard));
+        }
+
+        return nodes;
+    }
+
+    @Override
+    public Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet) {
+        Map<Integer, Pair<SolrHost, ShardInstanceState>> response = new HashMap<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
+                continue;
+
+            SolrHost latestNode = null;
+            ShardInstanceState latestState = null;
+
+            for (SolrHost node : this.shardNodesCache.get(shard)) {
+                ShardInstance shardNode = ShardInstance.from(shard, node);
+                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
+                if (latestState == null || state.compareTo(latestState) < 0) {
+                    latestState = state;
+                    latestNode = node;
+                }
+            }
+
+            if (latestNode != null)
+                response.put(shard.extractShardId(), new Pair<>(latestNode, latestState));
+        }
+
+        return response;
+    }
+
+    @Override
+    public List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId) {
+        List<Pair<SolrHost, ShardInstanceState>> response = new LinkedList<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            if (!shardSet.getCore().equals(shard.extractShardSetCore()))
+                continue;
+
+            for (SolrHost node : this.shardNodesCache.get(shard)) {
+                ShardInstance shardNode = ShardInstance.from(shard, node);
+                ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
+                response.add(new Pair<>(node, state));
+            }
+        }
+
+        return response;
+    }
+
+    @Override
+    public Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node) {
+        Set<Integer> shardIds = new HashSet<>();
+
+        for (Shard shard : this.shardNodesCache.getKeys()) {
+            if (shardSet.getCore().equals(shard.extractShardSetCore()) && this.shardNodesCache.contains(shard, node))
+                shardIds.add(shard.extractShardId());
+        }
+
+        return shardIds;
+    }
+
+    @Override
+    public Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node) {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+}
@@ -24,12 +24,14 @@ import org.alfresco.service.cmr.dictionary.DictionaryService;
 import org.alfresco.service.cmr.search.SearchParameters;
 import org.alfresco.service.namespace.NamespaceService;
 import org.alfresco.service.namespace.QName;
+import org.antlr.runtime.RecognitionException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.ApplicationEvent;
+import org.springframework.context.annotation.Primary;
 import org.springframework.extensions.surf.util.AbstractLifecycleBean;
 import org.springframework.stereotype.Component;

@@ -50,6 +52,7 @@ import com.inteligr8.alfresco.cachext.CollectionCache;
 import com.inteligr8.alfresco.cachext.MultiValueCache;

 @Component
+@Primary
 public class SolrShardRegistry extends AbstractLifecycleBean implements ShardRegistry {

     private final Logger logger = LoggerFactory.getLogger(this.getClass());
@@ -108,8 +111,8 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
     @Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
     private SimpleCache<String, QName> coreExplicitIdCache;

-    @Value("${inteligr8.asie.registerUnknownShardOffline}")
-    private boolean registerOffline;
+    @Value("${inteligr8.asie.registerUnknownShardDisabled}")
+    private boolean registerDisabled;

     @Value("${inteligr8.asie.offlineIdleShardInSeconds}")
     private int offlineIdleShardInSeconds;
@@ -212,7 +215,7 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         try {
             this.persistShardSetCache();
             this.persistNodeCache();
-            this.persistShardNodeCache();
+            this.persistShardNodesCache();
             this.persistShardInstanceCache();
         } finally {
             this.jobLockService.releaseLock(lockId, this.shardLock);
@@ -258,7 +261,7 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         }, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE);
     }

-    private void persistShardNodeCache() {
+    private void persistShardNodesCache() {
         // add anything missing
         // update anything changed
         for (Shard shard : this.shardNodesCache.getKeys()) {
@@ -277,17 +280,7 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         // update anything changed
         for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
             ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
-            ShardInstanceState currentState = (ShardInstanceState) this.attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
-            if (currentState != null) {
-                if (currentState.compareTo(state) >= 0) {
-                    // do nothing
-                } else {
-                    this.logger.debug("The persisted state was old; updating: {}: {} => {}", shardNode, currentState, state);
-                    this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
-                }
-            } else {
-                this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
-            }
+            this.checkSetAttribute(state, shardNode);
         }

         // we are not removing anything removed from the cache, as it might have expired
@@ -309,15 +302,18 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         }, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE);
     }

-    protected void persistCache(ShardSet shardSet, SolrHost node, Shard shard, ShardInstance shardNode, ShardInstanceState state) {
-        String lockId = this.jobLockService.getLock(this.shardLock, 2000L, 100L, 50);
-        try {
-            this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, shardSet.getCore());
-            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, node.getSpec());
-            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
-            this.checkSetAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
-        } finally {
-            this.jobLockService.releaseLock(lockId, this.shardLock);
+    private void checkSetAttribute(ShardInstanceState state, ShardInstance shardNode) {
+        ShardInstanceState currentState = (ShardInstanceState) this.attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
+        if (currentState != null) {
+            if (currentState.compareTo(state) >= 0) {
+                // current state is older (greater; further down the list)
+                // do nothing
+            } else {
+                this.logger.debug("The persisted state was old; updating: {}: {} => {}", shardNode, currentState, state);
+                this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
+            }
+        } else {
+            this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
         }
     }

@@ -340,7 +336,28 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         ShardInstance shardNode = ShardInstance.from(shard, node);
         ShardInstanceState state = ShardInstanceState.from(shardNodeState);

-        this.persistCache(shardSet, node, shard, shardNode, state);
+        String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
+        try {
+            if (!this.shardsetsCache.contains(shardSet.getCore()))
+                this.shardsetsCache.put(shardSet.getCore(), shardSet);
+            this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, shardSet.getCore());
+
+            if (!this.nodesCache.contains(node.getSpec()))
+                this.nodesCache.put(node.getSpec(), node);
+            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, node.getSpec());
+            if (!this.shardNodesCache.contains(shard, node))
+                this.shardNodesCache.add(shard, node);
+            this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
+
+            ShardInstanceState currentState = this.shardInstanceStatesCache.get(shardNode);
+            if (currentState == null || currentState.compareTo(state) > 0)
+                this.shardInstanceStatesCache.put(shardNode, state);
+            this.checkSetAttribute(state, shardNode);
+            if (this.registerDisabled && !this.shardInstanceDisableCache.contains(shardNode))
+                this.shardInstanceDisableCache.add(shardNode);
+        } finally {
+            this.jobLockService.releaseLock(lockId, this.shardLock);
+        }
     }

     @Override
@@ -350,12 +367,17 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
         SolrHost node = SolrHost.from(shardInstance);
         ShardInstance shardNode = ShardInstance.from(shard, node);

+        String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
+        try {
             this.shardInstanceStatesCache.remove(shardNode);
             this.shardInstanceDisableCache.remove(shardNode);
             this.shardInstanceUnavailableCache.remove(shardNode);
             this.nodeDisableCache.remove(node);
             this.nodeUnavailableCache.remove(node);
             this.attrService.removeAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
+        } finally {
+            this.jobLockService.releaseLock(lockId, this.shardLock);
+        }
     }

     @Override
@@ -426,6 +448,8 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
             this.shardInstanceDisableCache.clear();
             this.shardInstanceUnavailableCache.clear();
             this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODE);
+
+            this.coreExplicitIdCache.clear();
         } finally {
             this.jobLockService.releaseLock(lockId, this.shardLock);
         }
@@ -497,20 +521,55 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg

         for (String shardSetSpec : this.shardsetsCache.getKeys()) {
             ShardSet shardSet = this.shardsetsCache.get(shardSetSpec);
-            List<org.alfresco.repo.index.shard.ShardInstance> shards = new LinkedList<>();

+            Set<Integer> shardIds = this.getIndexSlice(searchParameters, shardSet);
+            if (shardIds == null)
+                continue;
+
+            List<org.alfresco.repo.index.shard.ShardInstance> shards = this.selectRandomNodes(shardSet, shardIds);
+
+            if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
+                bestShards = shards;
+            if (bestShards != null && bestShards.size() == 1)
+                break;
+        }
+
+        return bestShards;
+    }
+
+    protected Set<Integer> getIndexSlice(SearchParameters searchParameters, ShardSet shardSet) {
+        try {
             switch (shardSet.getMethod()) {
                 case EXPLICIT_ID:
-                    String property = shardSet.getPrefixedProperty();
-                    QName propertyQName = QName.createQName(property, this.namespaceService);
-                    DataTypeDefinition dtdef = this.dictionaryService.getProperty(propertyQName).getDataType();
+                    return this.getExplicitIdIndexSlice(searchParameters, shardSet);
+                default:
+                    // no optimization available
+                    return null;
+            }
+        } catch (RecognitionException re) {
+            this.logger.debug("Failed to parse the query: " + searchParameters.getQuery(), re);
+            // no optimization available
+            return null;
+        }
+    }
+
+    protected Set<Integer> getExplicitIdIndexSlice(SearchParameters searchParameters, ShardSet shardSet) throws RecognitionException {
+        this.logger.trace("Found {} shard set, which is the highest priority", shardSet.getMethod());

         QueryInspector inspector = this.queryInspectorFactory.selectQueryInspector(searchParameters);
-        if (inspector == null)
-            continue;
+        if (inspector == null) {
+            this.logger.debug("The search is using an unsupported query language; unable to optimize for {}: {}", shardSet.getMethod(), searchParameters.getLanguage());
+            return null;
+        }
+
+        String property = shardSet.getPrefixedProperty();
+        QName propertyQName = QName.createQName(property, this.namespaceService);
+        this.logger.trace("Will attempt to see if search has a required constraint on explicit shard ID property: {}", propertyQName);
+        DataTypeDefinition dtdef = this.dictionaryService.getProperty(propertyQName).getDataType();
+
         Set<Integer> shardIds = new HashSet<>();
         List<QueryValue> values = inspector.findRequiredPropertyValues(searchParameters.getQuery(), searchParameters.getDefaultOperator(), propertyQName, dtdef);
+        this.logger.trace("Found {} matching terms query: {}: {}", values.size(), propertyQName, searchParameters.getQuery());
         for (QueryValue value : values) {
             if (value instanceof QuerySingleValue<?>) {
                 @SuppressWarnings("unchecked")
@@ -530,19 +589,16 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
                 }
             }

-            shards.addAll(this.getIndexSlice(shardSet, shardIds));
-            default:
-                // make no determination
+        if (shardIds.isEmpty()) {
+            this.logger.trace("The {} shard set cannot not be used to optimize the query", shardSet.getMethod());
+            return null;
+        }
+        this.logger.debug("The {} shard set was used to optimize the query to use only shards: {}", shardSet.getMethod(), shardIds);
+
+        return shardIds;
     }

-        if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
-            bestShards = shards;
-        }
-
-        return bestShards;
-    }
-
-    protected List<org.alfresco.repo.index.shard.ShardInstance> getIndexSlice(ShardSet shardSet, Collection<Integer> shardIds) {
+    protected List<org.alfresco.repo.index.shard.ShardInstance> selectRandomNodes(ShardSet shardSet, Collection<Integer> shardIds) {
         List<org.alfresco.repo.index.shard.ShardInstance> shardNodes = new LinkedList<>();

         for (Integer shardId : shardIds) {
@@ -1,40 +1,109 @@

-inteligr8.asie.registerUnknownShardOffline=false
-inteligr8.asie.idleShardExpirationInSeconds=${}
+inteligr8.asie.registerUnknownShardDisabled=false
+inteligr8.asie.offlineIdleShardInSeconds=120
+inteligr8.asie.forgetOfflineShardInSeconds=86400



+# we don't want items expiring out of the following caches
+# an evicition policy of NONE disables the maxItems limits
+
 # Overrides of alfresco-repository.jar/alfresco/caches.properties
-cache.shardStateSharedCache.tx.maxItems=16384
-cache.shardStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
-cache.shardStateSharedCache.maxItems=16384
-cache.shardStateSharedCache.timeToLiveSeconds=1800
-cache.shardStateSharedCache.maxIdleSeconds=0
-cache.shardStateSharedCache.cluster.type=fully-distributed
-cache.shardStateSharedCache.backup-count=1
-cache.shardStateSharedCache.eviction-policy=LRU
-cache.shardStateSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
-cache.shardStateSharedCache.readBackupData=false
+cache.asieShardsetsSharedCache.tx.maxItems=65536
+cache.asieShardsetsSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieShardsetsSharedCache.maxItems=65536
+cache.asieShardsetsSharedCache.timeToLiveSeconds=0
+cache.asieShardsetsSharedCache.maxIdleSeconds=0
+cache.asieShardsetsSharedCache.cluster.type=fully-distributed
+cache.asieShardsetsSharedCache.backup-count=1
+cache.asieShardsetsSharedCache.eviction-policy=NONE
+cache.asieShardsetsSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieShardsetsSharedCache.readBackupData=false

-# maxItems needs to be greater than total shards, including HA instances
-cache.offlineShardStateSharedCache.tx.maxItems=16384
-cache.offlineShardStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
-cache.offlineShardStateSharedCache.maxItems=16384
-cache.offlineShardStateSharedCache.timeToLiveSeconds=1800
-cache.offlineShardStateSharedCache.maxIdleSeconds=0
-cache.offlineShardStateSharedCache.cluster.type=fully-distributed
-cache.offlineShardStateSharedCache.backup-count=1
-cache.offlineShardStateSharedCache.eviction-policy=LRU
-cache.offlineShardStateSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
-cache.offlineShardStateSharedCache.readBackupData=false
+cache.asieNodesSharedCache.tx.maxItems=65536
+cache.asieNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieNodesSharedCache.maxItems=65536
+cache.asieNodesSharedCache.timeToLiveSeconds=0
+cache.asieNodesSharedCache.maxIdleSeconds=0
+cache.asieNodesSharedCache.cluster.type=fully-distributed
+cache.asieNodesSharedCache.backup-count=1
+cache.asieNodesSharedCache.eviction-policy=NONE
+cache.asieNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieNodesSharedCache.readBackupData=false

-cache.coreExplicitIdSharedCache.tx.maxItems=16384
-cache.coreExplicitIdSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
-cache.coreExplicitIdSharedCache.maxItems=16384
-cache.coreExplicitIdSharedCache.timeToLiveSeconds=1800
-cache.coreExplicitIdSharedCache.maxIdleSeconds=0
-cache.coreExplicitIdSharedCache.cluster.type=fully-distributed
-cache.coreExplicitIdSharedCache.backup-count=1
-cache.coreExplicitIdSharedCache.eviction-policy=LRU
-cache.coreExplicitIdSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
-cache.coreExplicitIdSharedCache.readBackupData=false
+cache.asieShardNodesSharedCache.tx.maxItems=65536
+cache.asieShardNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieShardNodesSharedCache.maxItems=65536
+cache.asieShardNodesSharedCache.timeToLiveSeconds=0
+cache.asieShardNodesSharedCache.maxIdleSeconds=0
+cache.asieShardNodesSharedCache.cluster.type=fully-distributed
+cache.asieShardNodesSharedCache.backup-count=1
+cache.asieShardNodesSharedCache.eviction-policy=NONE
+cache.asieShardNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieShardNodesSharedCache.readBackupData=false
+
+cache.asieShardInstanceStateSharedCache.tx.maxItems=65536
+cache.asieShardInstanceStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieShardInstanceStateSharedCache.maxItems=65536
+cache.asieShardInstanceStateSharedCache.timeToLiveSeconds=0
+cache.asieShardInstanceStateSharedCache.maxIdleSeconds=0
+cache.asieShardInstanceStateSharedCache.cluster.type=fully-distributed
+cache.asieShardInstanceStateSharedCache.backup-count=1
+cache.asieShardInstanceStateSharedCache.eviction-policy=NONE
+cache.asieShardInstanceStateSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieShardInstanceStateSharedCache.readBackupData=false
+
+cache.asieNodeDisabledSharedCache.tx.maxItems=65536
+cache.asieNodeDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieNodeDisabledSharedCache.maxItems=65536
+cache.asieNodeDisabledSharedCache.timeToLiveSeconds=0
+cache.asieNodeDisabledSharedCache.maxIdleSeconds=0
+cache.asieNodeDisabledSharedCache.cluster.type=fully-distributed
+cache.asieNodeDisabledSharedCache.backup-count=1
+cache.asieNodeDisabledSharedCache.eviction-policy=NONE
+cache.asieNodeDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieNodeDisabledSharedCache.readBackupData=false
+
+cache.asieNodeUnavailableSharedCache.tx.maxItems=65536
+cache.asieNodeUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieNodeUnavailableSharedCache.maxItems=65536
+cache.asieNodeUnavailableSharedCache.timeToLiveSeconds=0
+cache.asieNodeUnavailableSharedCache.maxIdleSeconds=0
+cache.asieNodeUnavailableSharedCache.cluster.type=fully-distributed
+cache.asieNodeUnavailableSharedCache.backup-count=1
+cache.asieNodeUnavailableSharedCache.eviction-policy=NONE
+cache.asieNodeUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieNodeUnavailableSharedCache.readBackupData=false
+
+cache.asieShardInstanceDisabledSharedCache.tx.maxItems=65536
+cache.asieShardInstanceDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieShardInstanceDisabledSharedCache.maxItems=65536
+cache.asieShardInstanceDisabledSharedCache.timeToLiveSeconds=0
+cache.asieShardInstanceDisabledSharedCache.maxIdleSeconds=0
+cache.asieShardInstanceDisabledSharedCache.cluster.type=fully-distributed
+cache.asieShardInstanceDisabledSharedCache.backup-count=1
+cache.asieShardInstanceDisabledSharedCache.eviction-policy=NONE
+cache.asieShardInstanceDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieShardInstanceDisabledSharedCache.readBackupData=false
+
+cache.asieShardInstanceUnavailableSharedCache.tx.maxItems=65536
+cache.asieShardInstanceUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieShardInstanceUnavailableSharedCache.maxItems=65536
+cache.asieShardInstanceUnavailableSharedCache.timeToLiveSeconds=0
+cache.asieShardInstanceUnavailableSharedCache.maxIdleSeconds=0
+cache.asieShardInstanceUnavailableSharedCache.cluster.type=fully-distributed
+cache.asieShardInstanceUnavailableSharedCache.backup-count=1
+cache.asieShardInstanceUnavailableSharedCache.eviction-policy=NONE
+cache.asieShardInstanceUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieShardInstanceUnavailableSharedCache.readBackupData=false
+
+cache.asieCoreExplicitSharedCache.tx.maxItems=65536
+cache.asieCoreExplicitSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
+cache.asieCoreExplicitSharedCache.maxItems=65536
+cache.asieCoreExplicitSharedCache.timeToLiveSeconds=0
+cache.asieCoreExplicitSharedCache.maxIdleSeconds=0
+cache.asieCoreExplicitSharedCache.cluster.type=fully-distributed
+cache.asieCoreExplicitSharedCache.backup-count=1
+cache.asieCoreExplicitSharedCache.eviction-policy=NONE
+cache.asieCoreExplicitSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
+cache.asieCoreExplicitSharedCache.readBackupData=false
@@ -7,12 +7,44 @@
     xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
         http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">

-    <bean name="offlineShardStateCache" factory-bean="cacheFactory" factory-method="createCache">
-        <constructor-arg value="cache.offlineShardStateSharedCache" />
+    <bean name="asieShardsetsCache" factory-bean="cacheFactory" factory-method="createCache">
+        <constructor-arg value="cache.asieShardsetsSharedCache" />
     </bean>

-    <bean name="coreExplicitIdCache" factory-bean="cacheFactory" factory-method="createCache">
-        <constructor-arg value="cache.coreExplicitIdSharedCache" />
+    <bean name="asieNodesCache" factory-bean="cacheFactory" factory-method="createCache">
+        <constructor-arg value="cache.asieNodesSharedCache" />
+    </bean>
+
+    <bean name="asieShardNodesCache" factory-bean="cacheFactory" factory-method="createMultiValueCache">
+        <constructor-arg value="cache.asieShardNodesSharedCache" />
+    </bean>
+
+    <bean name="asieShardInstanceStateCache" factory-bean="cacheFactory" factory-method="createCache">
+        <constructor-arg value="cache.asieShardInstanceStateSharedCache" />
+    </bean>
+
+    <bean name="asieNodeDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
+        <constructor-arg value="cache.asieNodeDisabledSharedCache" />
+        <constructor-arg value="java.util.HashSet" />
+    </bean>
+
+    <bean name="asieNodeUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
+        <constructor-arg value="cache.asieNodeUnavailableSharedCache" />
+        <constructor-arg value="java.util.HashSet" />
+    </bean>
+
+    <bean name="asieShardInstanceDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
+        <constructor-arg value="cache.asieShardInstanceDisabledSharedCache" />
+        <constructor-arg value="java.util.HashSet" />
+    </bean>
+
+    <bean name="asieShardInstanceUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
+        <constructor-arg value="cache.asieShardInstanceUnavailableSharedCache" />
+        <constructor-arg value="java.util.HashSet" />
+    </bean>
+
+    <bean name="asieCoreExplicitCache" factory-bean="cacheFactory" factory-method="createCache">
+        <constructor-arg value="cache.asieCoreExplicitSharedCache" />
     </bean>

 </beans>
@@ -7,4 +7,5 @@ module.version=${module.version}
 module.repo.version.min=23.0

 # this is creating all sorts of problems; probably because of the non-standard versioning
+module.depends.com.inteligr8.alfresco.cachext-platform-module=*
 module.depends.com.inteligr8.alfresco.cxf-jaxrs-platform-module=*
BIN  enterprise-module/metadata.keystore (new file; binary content not shown)
@@ -16,8 +16,12 @@
     <name>ASIE Platform Module for ACS Enterprise</name>

     <properties>
-        <alfresco.sdk.version>5.2.0</alfresco.sdk.version>
+        <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
         <alfresco.platform.version>23.3.0</alfresco.platform.version>
+        <alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
+        <tomcat-rad.version>10-2.1</tomcat-rad.version>
+
+        <beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
     </properties>

     <dependencyManagement>
@@ -80,7 +84,7 @@
             <groupId>com.inteligr8.alfresco</groupId>
             <artifactId>cxf-jaxrs-platform-module</artifactId>
             <version>1.3.1-acs-v23.3</version>
-            <scope>provided</scope>
+            <type>amp</type>
         </dependency>

         <!-- Including for testing purposes only -->
@@ -1,22 +1,22 @@
 #!/bin/sh

 discoverArtifactId() {
-    ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate`
+    ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
 }

 rebuild() {
     echo "Rebuilding project ..."
-    mvn process-classes
+    mvn process-test-classes
 }

 start() {
     echo "Rebuilding project and starting Docker containers to support rapid application development ..."
-    mvn -Drad process-classes
+    mvn -Drad process-test-classes
 }

 start_log() {
     echo "Rebuilding project and starting Docker containers to support rapid application development ..."
-    mvn -Drad -Ddocker.showLogs process-classes
+    mvn -Drad -Ddocker.showLogs process-test-classes
 }

 stop() {
@@ -16,7 +16,7 @@
     <name>ASIE Shared Library for Platform Modules</name>

     <properties>
-        <alfresco.sdk.version>5.2.0</alfresco.sdk.version>
+        <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
         <alfresco.platform.version>23.3.0</alfresco.platform.version>
     </properties>
