Compare commits
27 Commits
SHA1
---
ba5188ff93
493f1f813d
0ed41a39e4
3cd8c91f93
0cb566e18d
c38ed7a73a
bf9a5fca50
40d13ac266
75e25577b7
35bae4283d
d537c8ec49
f17556835a
4531c7af8e
692410f535
1230a07a5a
47835d852f
7535475581
14887ca167
632900ecee
af7c9e148e
de8e0bf2d7
006597f6fb
3ecbf006dd
07d6e63457
01d2f5ce23
8a0db9f11d
3e544c125b
@@ -6,20 +6,20 @@
    <parent>
        <groupId>com.inteligr8.alfresco</groupId>
        <artifactId>asie-platform-module-parent</artifactId>
-       <version>1.2-SNAPSHOT</version>
+       <version>1.3-SNAPSHOT</version>
        <relativePath>../</relativePath>
    </parent>

    <groupId>com.inteligr8.alfresco</groupId>
    <artifactId>asie-api</artifactId>
-   <version>1.0-SNAPSHOT-asie2</version>
+   <version>1.1-SNAPSHOT-asie2</version>
    <packaging>jar</packaging>

-   <name>ASIE JAX-RS API</name>
-   <description>Alfresco Search & Insight Engine JAX-RS API</description>
+   <name>ASIE Jakarta RS API</name>
+   <description>Alfresco Search & Insight Engine Jakarta RS API</description>

    <properties>
-       <alfresco.platform.version>6.0.0</alfresco.platform.version>
+       <alfresco.platform.version>23.2.0</alfresco.platform.version>
    </properties>

    <dependencyManagement>
@@ -38,11 +38,45 @@
        <dependency>
            <groupId>com.inteligr8</groupId>
            <artifactId>solr-api</artifactId>
-           <version>1.0-SNAPSHOT-solr6</version>
+           <version>1.1-SNAPSHOT-solr6</version>
        </dependency>
        <dependency>
            <groupId>org.alfresco</groupId>
-           <artifactId>alfresco-repository</artifactId>
+           <artifactId>alfresco-data-model</artifactId>
+           <exclusions>
+               <exclusion>
+                   <groupId>*</groupId>
+                   <artifactId>*</artifactId>
+               </exclusion>
+           </exclusions>
        </dependency>
+       <dependency>
+           <groupId>org.apache.commons</groupId>
+           <artifactId>commons-lang3</artifactId>
+           <version>3.17.0</version>
+       </dependency>
+       <dependency>
+           <groupId>org.apache.logging.log4j</groupId>
+           <artifactId>log4j-slf4j2-impl</artifactId>
+           <scope>test</scope>
+       </dependency>
+       <dependency>
+           <groupId>com.inteligr8</groupId>
+           <artifactId>common-rest-client</artifactId>
+           <version>3.0.2-jersey</version>
+           <scope>test</scope>
+       </dependency>
+       <dependency>
+           <groupId>org.glassfish.jersey.inject</groupId>
+           <artifactId>jersey-hk2</artifactId>
+           <version>3.1.10</version>
+           <scope>test</scope>
+       </dependency>
+       <dependency>
+           <groupId>org.junit.jupiter</groupId>
+           <artifactId>junit-jupiter-api</artifactId>
+           <version>5.11.2</version>
+           <scope>test</scope>
+       </dependency>
    </dependencies>

@@ -1,12 +1,12 @@
package com.inteligr8.alfresco.asie.api;

-import com.inteligr8.alfresco.asie.model.ActionResponse;
+import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.EmptyResponse;
import com.inteligr8.alfresco.asie.model.core.CheckRequest;
import com.inteligr8.alfresco.asie.model.core.DisableIndexingRequest;
import com.inteligr8.alfresco.asie.model.core.EnableIndexingRequest;
+import com.inteligr8.alfresco.asie.model.core.FixAction;
import com.inteligr8.alfresco.asie.model.core.FixRequest;
-import com.inteligr8.alfresco.asie.model.core.FixResponseAction;
import com.inteligr8.alfresco.asie.model.core.IndexingStatusAction;
import com.inteligr8.alfresco.asie.model.core.NewCoreRequest;
import com.inteligr8.alfresco.asie.model.core.NewDefaultIndexRequest;
@@ -14,14 +14,15 @@ import com.inteligr8.alfresco.asie.model.core.PurgeRequest;
import com.inteligr8.alfresco.asie.model.core.ReindexRequest;
import com.inteligr8.alfresco.asie.model.core.ReportRequest;
import com.inteligr8.alfresco.asie.model.core.ReportResponse;
+import com.inteligr8.alfresco.asie.model.core.RetryAction;
import com.inteligr8.alfresco.asie.model.core.RetryRequest;
-import com.inteligr8.alfresco.asie.model.core.RetryResponseAction;
import com.inteligr8.alfresco.asie.model.core.SummaryRequest;
import com.inteligr8.alfresco.asie.model.core.SummaryResponse;
import com.inteligr8.alfresco.asie.model.core.UpdateCoreRequest;
import com.inteligr8.alfresco.asie.model.core.UpdateLog4jRequest;
import com.inteligr8.alfresco.asie.model.core.UpdateSharedRequest;
-import com.inteligr8.solr.model.ResponseAction;
+import com.inteligr8.solr.model.Action;
+import com.inteligr8.solr.model.ActionResponse;

import jakarta.ws.rs.BeanParam;
import jakarta.ws.rs.GET;
@@ -42,43 +43,43 @@ public interface CoreAdminApi extends com.inteligr8.solr.api.CoreAdminApi {

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> updateCore(@BeanParam UpdateCoreRequest request);
+   ActionResponse<Action> updateCore(@BeanParam UpdateCoreRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> check(@BeanParam CheckRequest request);
+   ActionResponse<Action> check(@BeanParam CheckRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> updateShared(@BeanParam UpdateSharedRequest request);
+   ActionResponse<Action> updateShared(@BeanParam UpdateSharedRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> updateLog4j(@BeanParam UpdateLog4jRequest request);
+   ActionResponse<Action> updateLog4j(@BeanParam UpdateLog4jRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> purge(@BeanParam PurgeRequest request);
+   ActionCoreResponse<Action> purge(@BeanParam PurgeRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<ResponseAction> reindex(@BeanParam ReindexRequest request);
+   ActionCoreResponse<Action> reindex(@BeanParam ReindexRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<RetryResponseAction> retry(@BeanParam RetryRequest request);
+   ActionCoreResponse<RetryAction> retry(@BeanParam RetryRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<FixResponseAction> fix(@BeanParam FixRequest request);
+   ActionCoreResponse<FixAction> fix(@BeanParam FixRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<IndexingStatusAction> enableIndexing(@BeanParam EnableIndexingRequest request);
+   ActionCoreResponse<IndexingStatusAction> enableIndexing(@BeanParam EnableIndexingRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)
-   ActionResponse<IndexingStatusAction> disableIndexing(@BeanParam DisableIndexingRequest request);
+   ActionCoreResponse<IndexingStatusAction> disableIndexing(@BeanParam DisableIndexingRequest request);

    @GET
    @Produces(MediaType.APPLICATION_JSON)

@@ -0,0 +1,18 @@
package com.inteligr8.alfresco.asie.model;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.solr.model.Action;
import com.inteligr8.solr.model.Cores;

@JsonIgnoreProperties(ignoreUnknown = true)
public class ActionCoreResponse<T extends Action> extends BaseResponse {

    @JsonProperty(value = "action")
    private Cores<T> cores;

    public Cores<T> getCores() {
        return cores;
    }

}
@@ -1,22 +0,0 @@
package com.inteligr8.alfresco.asie.model;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.solr.model.ResponseAction;

@JsonIgnoreProperties(ignoreUnknown = true)
public class ActionResponse<T extends ResponseAction> extends BaseResponse {

    @JsonProperty(access = Access.READ_ONLY)
    private T action;

    public T getAction() {
        return action;
    }

    protected void setAction(T action) {
        this.action = action;
    }

}
@@ -1,41 +1,28 @@
package com.inteligr8.alfresco.asie.model;

import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonProperty.Access;

public class BaseResponse extends com.inteligr8.solr.model.BaseResponse {

-   @JsonProperty(value = "STATUS", access = Access.READ_ONLY)
+   @JsonProperty(value = "STATUS")
    private String reason;

-   @JsonProperty(value = "exception", access = Access.READ_ONLY)
+   @JsonProperty(value = "exception")
    private String exception;

-   @JsonProperty(value = "msg", access = Access.READ_ONLY)
+   @JsonProperty(value = "msg")
    private String message;

    public String getReason() {
        return reason;
    }

-   protected void setReason(String reason) {
-       this.reason = reason;
-   }
-
    public String getException() {
        return exception;
    }

-   protected void setException(String exception) {
-       this.exception = exception;
-   }
-
    public String getMessage() {
        return message;
    }

-   protected void setMessage(String message) {
-       this.message = message;
-   }
-
}

@@ -0,0 +1,25 @@
package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.solr.model.Action;
import com.inteligr8.solr.model.TransactionStatus;

@JsonIgnoreProperties(ignoreUnknown = true)
public class FixAction extends Action {

    @JsonProperty(value = "txToReindex")
    private TransactionStatus transactionStatus;

    @JsonProperty(value = "aclChangeSetToReindex")
    private TransactionStatus aclStatus;

    public TransactionStatus getTransactionStatus() {
        return transactionStatus;
    }

    public TransactionStatus getAclStatus() {
        return aclStatus;
    }

}
@@ -1,34 +0,0 @@
package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.solr.model.ResponseAction;
import com.inteligr8.solr.model.TransactionResponseStatus;

@JsonIgnoreProperties(ignoreUnknown = true)
public class FixResponseAction extends ResponseAction {

    @JsonProperty(value = "txToReindex", access = Access.READ_ONLY)
    private TransactionResponseStatus transactionStatus;

    @JsonProperty(value = "aclChangeSetToReindex", access = Access.READ_ONLY)
    private TransactionResponseStatus aclStatus;

    public TransactionResponseStatus getTransactionStatus() {
        return transactionStatus;
    }

    protected void setTransactionStatus(TransactionResponseStatus transactionStatus) {
        this.transactionStatus = transactionStatus;
    }

    public TransactionResponseStatus getAclStatus() {
        return aclStatus;
    }

    protected void setAclStatus(TransactionResponseStatus aclStatus) {
        this.aclStatus = aclStatus;
    }

}
@@ -1,25 +1,43 @@
package com.inteligr8.alfresco.asie.model.core;

-import java.util.Map;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.inteligr8.solr.model.ResponseAction;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.inteligr8.solr.model.Action;

@JsonIgnoreProperties(ignoreUnknown = true)
-public class IndexingStatusAction extends ResponseAction {
+public class IndexingStatusAction extends Action {

-   private Map<String, IndexingStatusMetadata> cores;
-
-   @JsonAnyGetter
-   public Map<String, IndexingStatusMetadata> getCores() {
-       return cores;
-   }
-
-   @JsonAnySetter
-   public void setCores(Map<String, IndexingStatusMetadata> cores) {
-       this.cores = cores;
-   }
+   @JsonProperty(value = "ACL")
+   private Boolean aclIndexed;
+
+   @JsonProperty(value = "CONTENT")
+   private Boolean contentIndexed;
+
+   @JsonProperty(value = "METADATA")
+   private Boolean metadataIndexed;
+
+   public boolean isAclIndexed() {
+       return Boolean.TRUE.equals(this.aclIndexed);
+   }
+
+   public Boolean getAclIndexed() {
+       return aclIndexed;
+   }
+
+   public boolean isContentIndexed() {
+       return Boolean.TRUE.equals(this.contentIndexed);
+   }
+
+   public Boolean getContentIndexed() {
+       return contentIndexed;
+   }
+
+   public boolean isMetadataIndexed() {
+       return Boolean.TRUE.equals(this.metadataIndexed);
+   }
+
+   public Boolean getMetadataIndexed() {
+       return metadataIndexed;
+   }

}

@@ -1,56 +0,0 @@
package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.solr.model.ResponseAction;

@JsonIgnoreProperties(ignoreUnknown = true)
public class IndexingStatusMetadata extends ResponseAction {

    @JsonProperty(value = "ACL", access = Access.READ_ONLY)
    private Boolean aclIndexed;

    @JsonProperty(value = "CONTENT", access = Access.READ_ONLY)
    private Boolean contentIndexed;

    @JsonProperty(value = "METADATA", access = Access.READ_ONLY)
    private Boolean metadataIndexed;

    public boolean isAclIndexed() {
        return Boolean.TRUE.equals(this.aclIndexed);
    }

    public Boolean getAclIndexed() {
        return aclIndexed;
    }

    protected void setAclIndexed(Boolean aclIndexed) {
        this.aclIndexed = aclIndexed;
    }

    public boolean isContentIndexed() {
        return Boolean.TRUE.equals(this.contentIndexed);
    }

    public Boolean getContentIndexed() {
        return contentIndexed;
    }

    protected void setContentIndexed(Boolean contentIndexed) {
        this.contentIndexed = contentIndexed;
    }

    public boolean isMetadataIndexed() {
        return Boolean.TRUE.equals(this.metadataIndexed);
    }

    public Boolean getMetadataIndexed() {
        return metadataIndexed;
    }

    protected void setMetadataIndexed(Boolean metadataIndexed) {
        this.metadataIndexed = metadataIndexed;
    }

}
@@ -19,16 +19,16 @@ public class PurgeRequest extends JsonFormattedResponseRequest<PurgeRequest> {
    private String core;

    @QueryParam("txid")
-   private Integer transactionId;
+   private Long transactionId;

    @QueryParam("acltxid")
-   private Integer aclTransactionId;
+   private Long aclTransactionId;

    @QueryParam("nodeId")
-   private Integer nodeId;
+   private Long nodeId;

    @QueryParam("aclid")
-   private Integer aclId;
+   private Long aclId;

    public String getAction() {
        return action;
@@ -51,54 +51,54 @@ public class PurgeRequest extends JsonFormattedResponseRequest<PurgeRequest> {
        return this;
    }

-   public Integer getTransactionId() {
+   public Long getTransactionId() {
        return transactionId;
    }

-   public void setTransactionId(Integer transactionId) {
+   public void setTransactionId(Long transactionId) {
        this.transactionId = transactionId;
    }

-   public PurgeRequest withTransactionId(Integer transactionId) {
+   public PurgeRequest withTransactionId(Long transactionId) {
        this.transactionId = transactionId;
        return this;
    }

-   public Integer getAclTransactionId() {
+   public Long getAclTransactionId() {
        return aclTransactionId;
    }

-   public void setAclTransactionId(Integer aclTransactionId) {
+   public void setAclTransactionId(Long aclTransactionId) {
        this.aclTransactionId = aclTransactionId;
    }

-   public PurgeRequest withAclTransactionId(Integer aclTransactionId) {
+   public PurgeRequest withAclTransactionId(Long aclTransactionId) {
        this.aclTransactionId = aclTransactionId;
        return this;
    }

-   public Integer getNodeId() {
+   public Long getNodeId() {
        return nodeId;
    }

-   public void setNodeId(Integer nodeId) {
+   public void setNodeId(Long nodeId) {
        this.nodeId = nodeId;
    }

-   public PurgeRequest withNodeId(Integer nodeId) {
+   public PurgeRequest withNodeId(Long nodeId) {
        this.nodeId = nodeId;
        return this;
    }

-   public Integer getAclId() {
+   public Long getAclId() {
        return aclId;
    }

-   public void setAclId(Integer aclId) {
+   public void setAclId(Long aclId) {
        this.aclId = aclId;
    }

-   public PurgeRequest withAclId(Integer aclId) {
+   public PurgeRequest withAclId(Long aclId) {
        this.aclId = aclId;
        return this;
    }

@@ -19,16 +19,16 @@ public class ReindexRequest extends JsonFormattedResponseRequest<ReindexRequest>
    private String core;

    @QueryParam("txid")
-   private Integer transactionId;
+   private Long transactionId;

    @QueryParam("acltxid")
-   private Integer aclTransactionId;
+   private Long aclTransactionId;

    @QueryParam("nodeId")
-   private Integer nodeId;
+   private Long nodeId;

    @QueryParam("aclid")
-   private Integer aclId;
+   private Long aclId;

    @QueryParam("query")
    private String query;
@@ -54,54 +54,54 @@ public class ReindexRequest extends JsonFormattedResponseRequest<ReindexRequest>
        return this;
    }

-   public Integer getTransactionId() {
+   public Long getTransactionId() {
        return transactionId;
    }

-   public void setTransactionId(Integer transactionId) {
+   public void setTransactionId(Long transactionId) {
        this.transactionId = transactionId;
    }

-   public ReindexRequest withTransactionId(Integer transactionId) {
+   public ReindexRequest withTransactionId(Long transactionId) {
        this.transactionId = transactionId;
        return this;
    }

-   public Integer getAclTransactionId() {
+   public Long getAclTransactionId() {
        return aclTransactionId;
    }

-   public void setAclTransactionId(Integer aclTransactionId) {
+   public void setAclTransactionId(Long aclTransactionId) {
        this.aclTransactionId = aclTransactionId;
    }

-   public ReindexRequest withAclTransactionId(Integer aclTransactionId) {
+   public ReindexRequest withAclTransactionId(Long aclTransactionId) {
        this.aclTransactionId = aclTransactionId;
        return this;
    }

-   public Integer getNodeId() {
+   public Long getNodeId() {
        return nodeId;
    }

-   public void setNodeId(Integer nodeId) {
+   public void setNodeId(Long nodeId) {
        this.nodeId = nodeId;
    }

-   public ReindexRequest withNodeId(Integer nodeId) {
+   public ReindexRequest withNodeId(Long nodeId) {
        this.nodeId = nodeId;
        return this;
    }

-   public Integer getAclId() {
+   public Long getAclId() {
        return aclId;
    }

-   public void setAclId(Integer aclId) {
+   public void setAclId(Long aclId) {
        this.aclId = aclId;
    }

-   public ReindexRequest withAclId(Integer aclId) {
+   public ReindexRequest withAclId(Long aclId) {
        this.aclId = aclId;
        return this;
    }

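The PurgeRequest and ReindexRequest changes above widen every node, ACL, and transaction identifier from Integer to Long. A minimal usage sketch of the widened fluent API, assuming a CoreAdminApi obtained as in the unit tests further below and an "alfresco" core name (both assumptions, not part of this hunk); only methods that appear in these diffs are used:

    ReindexRequest request = new ReindexRequest()
            .withCore("alfresco")                  // assumed example core name
            .withTransactionId(1234567890123L)     // Long, previously Integer
            .withAclTransactionId(98765L);

    // reindex(...) now returns ActionCoreResponse<Action> (see CoreAdminApi above).
    ActionCoreResponse<Action> response = api.reindex(request);
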
@@ -1,24 +1,9 @@
package com.inteligr8.alfresco.asie.model.core;

-import java.util.Map;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.inteligr8.solr.model.Metadata;

@JsonIgnoreProperties(ignoreUnknown = true)
-public class Report {
-
-   private Map<String, Map<String, Object>> report;
-
-   @JsonAnyGetter
-   public Map<String, Map<String, Object>> getReport() {
-       return report;
-   }
-
-   @JsonAnySetter
-   protected void setReport(Map<String, Map<String, Object>> report) {
-       this.report = report;
-   }
+public class Report extends Metadata {

}

@@ -25,10 +25,10 @@ public class ReportRequest extends JsonFormattedResponseRequest<ReportRequest> {
    private Long toTime;

    @QueryParam("fromTx")
-   private Integer fromTransactionId;
+   private Long fromTransactionId;

    @QueryParam("toTx")
-   private Integer toTransactionId;
+   private Long toTransactionId;

    public String getAction() {
        return action;
@@ -77,28 +77,28 @@ public class ReportRequest extends JsonFormattedResponseRequest<ReportRequest> {
        return this;
    }

-   public Integer getFromTransactionId() {
+   public Long getFromTransactionId() {
        return fromTransactionId;
    }

-   public void setFromTransactionId(Integer fromTransactionId) {
+   public void setFromTransactionId(Long fromTransactionId) {
        this.fromTransactionId = fromTransactionId;
    }

-   public ReportRequest fromTransactionId(Integer fromTransactionId) {
+   public ReportRequest fromTransactionId(Long fromTransactionId) {
        this.fromTransactionId = fromTransactionId;
        return this;
    }

-   public Integer getToTransactionId() {
+   public Long getToTransactionId() {
        return toTransactionId;
    }

-   public void setToTransactionId(Integer toTransactionId) {
+   public void setToTransactionId(Long toTransactionId) {
        this.toTransactionId = toTransactionId;
    }

-   public ReportRequest toTransactionId(Integer toTransactionId) {
+   public ReportRequest toTransactionId(Long toTransactionId) {
        this.toTransactionId = toTransactionId;
        return this;
    }

@@ -2,21 +2,17 @@ package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.alfresco.asie.model.BaseResponse;
+import com.inteligr8.solr.model.Cores;

@JsonIgnoreProperties(ignoreUnknown = true)
public class ReportResponse extends BaseResponse {

-   @JsonProperty(access = Access.READ_ONLY)
-   private Report report;
+   @JsonProperty(required = true)
+   private Cores<Report> cores;

-   public Report getReport() {
-       return report;
-   }
-
-   protected void setReport(Report report) {
-       this.report = report;
+   public Cores<Report> getCores() {
+       return cores;
    }

}

@@ -0,0 +1,17 @@
package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.inteligr8.solr.model.Action;

@JsonIgnoreProperties(ignoreUnknown = true)
public class RetryAction extends Action {

    @JsonProperty(value = "alfresco")
    private int[] nodeIds;

    public int[] getNodeIds() {
        return nodeIds;
    }

}
@@ -1,22 +0,0 @@
package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.solr.model.ResponseAction;

@JsonIgnoreProperties(ignoreUnknown = true)
public class RetryResponseAction extends ResponseAction {

    @JsonProperty(value = "alfresco", access = Access.READ_ONLY)
    private int[] nodeIds;

    public int[] getNodeIds() {
        return nodeIds;
    }

    public void setNodeIds(int[] nodeIds) {
        this.nodeIds = nodeIds;
    }

}
@@ -1,24 +1,7 @@
package com.inteligr8.alfresco.asie.model.core;

-import java.util.Map;
+import com.inteligr8.solr.model.Metadata;

-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

@JsonIgnoreProperties(ignoreUnknown = true)
-public class Summary {
-
-   private Map<String, Object> summary;
-
-   @JsonAnyGetter
-   public Map<String, Object> getSummary() {
-       return summary;
-   }
-
-   @JsonAnySetter
-   public void setSummary(Map<String, Object> summary) {
-       this.summary = summary;
+public class Summary extends Metadata {

}

@@ -2,21 +2,17 @@ package com.inteligr8.alfresco.asie.model.core;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.inteligr8.alfresco.asie.model.BaseResponse;
+import com.inteligr8.solr.model.Cores;

@JsonIgnoreProperties(ignoreUnknown = true)
public class SummaryResponse extends BaseResponse {

-   @JsonProperty(value = "Summary", access = Access.READ_ONLY)
-   private Summary summary;
+   @JsonProperty(value = "Summary", required = true)
+   private Cores<Summary> cores;

-   public Summary getSummary() {
-       return summary;
-   }
-
-   public void setSummary(Summary summary) {
-       this.summary = summary;
+   public Cores<Summary> getCores() {
+       return cores;
    }

}

@@ -0,0 +1,24 @@
package com.inteligr8.alfresco.asie;

import java.net.URL;

import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.rs.ClientJerseyImpl;

public class AsieClient extends ClientJerseyImpl {

    public AsieClient(String hostname) {
        super(new AsieClientConfiguration().withHostname(hostname));
        this.register();
    }

    public AsieClient(URL baseUrl) {
        super(new AsieClientConfiguration().withBaseUrl(baseUrl.toString()));
        this.register();
    }

    public CoreAdminApi getCoreAdminApi() {
        return this.getApi(this.getConfig().createAuthorizationFilter(), CoreAdminApi.class);
    }

}
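The new AsieClient above is the single entry point for the Jakarta RS API. A minimal sketch of how it composes with CoreAdminApi and the reshaped SummaryResponse, assuming a host reachable as "localhost" and an "alfresco" core as in the unit tests further below; only constructors and methods that appear in this changeset are used:

    AsieClient client = new AsieClient("localhost");
    CoreAdminApi api = client.getCoreAdminApi();

    // SummaryResponse now exposes Cores<Summary> instead of a single Summary.
    SummaryResponse response = api.getSummary(new SummaryRequest().withCore("alfresco"));
    Summary summary = response.getCores().getByCore("alfresco");
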
@@ -0,0 +1,54 @@
package com.inteligr8.alfresco.asie;

import java.io.IOException;
import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.inteligr8.rs.AuthorizationFilter;
import com.inteligr8.rs.ClientJerseyConfiguration;

import jakarta.ws.rs.client.ClientRequestContext;

public class AsieClientConfiguration implements ClientJerseyConfiguration {

    private Logger logger = LoggerFactory.getLogger(AsieClientConfiguration.class);
    private String baseUrl = "http://localhost:8983/solr";
    private String searchSecret = "alfresco-secret";

    @Override
    public String getBaseUrl() {
        return this.baseUrl;
    }

    public AsieClientConfiguration withBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
        return this;
    }

    public AsieClientConfiguration withHostname(String hostname) {
        this.baseUrl = "http://" + hostname + ":8983/solr";
        return this;
    }

    public AsieClientConfiguration withSearchSecret(String searchSecret) {
        this.searchSecret = searchSecret;
        return this;
    }

    @Override
    public AuthorizationFilter createAuthorizationFilter() {
        if (this.searchSecret == null)
            return null;

        return new AuthorizationFilter() {
            @Override
            public void filter(ClientRequestContext requestContext) throws IOException {
                logger.trace("Adding ASIE secret for authorization ...");
                requestContext.getHeaders().addAll("X-Alfresco-Search-Secret", Arrays.asList(searchSecret));
            }
        };
    }

}
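AsieClientConfiguration defaults to the standard Solr port (8983) and the stock "alfresco-secret" shared secret, and its authorization filter sends that secret as the X-Alfresco-Search-Secret header on every request. A hedged sketch of overriding those defaults with the fluent setters defined above; note the AsieClient constructors in this changeset build their own configuration internally, so this standalone form is illustrative only and the values are hypothetical:

    AsieClientConfiguration config = new AsieClientConfiguration()
            .withHostname("search01")               // expands to http://search01:8983/solr
            .withSearchSecret("my-shared-secret");  // sent as X-Alfresco-Search-Secret
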
@@ -0,0 +1,27 @@
package com.inteligr8.alfresco.asie;

import org.junit.jupiter.api.Assertions;

public class AssertionUtil {

    public static <T> T assertNotNull(T obj) {
        Assertions.assertNotNull(obj);
        return obj;
    }

    public static <T> T assertNotNull(T obj, String message) {
        Assertions.assertNotNull(obj, message);
        return obj;
    }

    public static void assertType(Object obj, Class<?> type) {
        Assertions.assertNotNull(obj);
        Assertions.assertEquals(type, obj.getClass());
    }

    public static void assertType(Object obj, Class<?> type, String message) {
        Assertions.assertNotNull(obj, message);
        Assertions.assertEquals(type, obj.getClass(), message);
    }

}
@@ -0,0 +1,23 @@
package com.inteligr8.alfresco.asie.api;

import java.net.MalformedURLException;

import org.junit.jupiter.api.BeforeAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.inteligr8.alfresco.asie.AsieClient;

public class AbstractApiUnitTest {

    protected Logger logger = LoggerFactory.getLogger(this.getClass());

    protected static AsieClient client;
    protected static String defaultCore = "alfresco";

    @BeforeAll
    private static void init() throws MalformedURLException {
        client = new AsieClient("localhost");
    }

}
@@ -0,0 +1,34 @@
package com.inteligr8.alfresco.asie.api;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import com.inteligr8.alfresco.asie.AssertionUtil;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.core.ReindexRequest;
import com.inteligr8.solr.model.Action;
import com.inteligr8.solr.model.Action.Status;
import com.inteligr8.solr.model.Cores;
import com.inteligr8.solr.model.ResponseHeader;

public class CoreAdminReindexUnitTest extends AbstractApiUnitTest {

    @Test
    public void reindex() {
        CoreAdminApi api = client.getCoreAdminApi();

        ActionCoreResponse<Action> response = api.reindex(
                new ReindexRequest()
                    .withCore(defaultCore));
        Assertions.assertNotNull(response);

        ResponseHeader responseHeader = AssertionUtil.assertNotNull(response.getResponseHeader());
        Assertions.assertEquals(0, responseHeader.getStatus());

        Cores<Action> cores = AssertionUtil.assertNotNull(response.getCores());
        Action action = AssertionUtil.assertNotNull(cores.getByCore(defaultCore));

        Assertions.assertEquals(Status.Scheduled, action.getStatus());
    }

}
@@ -0,0 +1,96 @@
package com.inteligr8.alfresco.asie.api;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import com.inteligr8.alfresco.asie.AsieClient;
import com.inteligr8.alfresco.asie.AssertionUtil;
import com.inteligr8.alfresco.asie.model.core.Report;
import com.inteligr8.alfresco.asie.model.core.ReportRequest;
import com.inteligr8.alfresco.asie.model.core.ReportResponse;
import com.inteligr8.alfresco.asie.model.core.Summary;
import com.inteligr8.alfresco.asie.model.core.SummaryRequest;
import com.inteligr8.alfresco.asie.model.core.SummaryResponse;
import com.inteligr8.solr.model.CoreMetadata;
import com.inteligr8.solr.model.Cores;
import com.inteligr8.solr.model.ResponseHeader;
import com.inteligr8.solr.model.core.StatusRequest;
import com.inteligr8.solr.model.core.StatusResponse;

import jakarta.ws.rs.ProcessingException;

public class CoreAdminStatusUnitTest extends AbstractApiUnitTest {

    @Test
    public void noHost() {
        AsieClient client = new AsieClient("doesnotexist");
        CoreAdminApi api = client.getCoreAdminApi();

        Assertions.assertThrows(ProcessingException.class, () -> {
            api.getStatus(
                    new StatusRequest()
                        .withCore(defaultCore));
        });
    }

    @Test
    public void summary() {
        CoreAdminApi api = client.getCoreAdminApi();

        SummaryResponse response = api.getSummary(
                new SummaryRequest()
                    .withCore(defaultCore));
        Assertions.assertNotNull(response);

        ResponseHeader responseHeader = AssertionUtil.assertNotNull(response.getResponseHeader());
        Assertions.assertEquals(0, responseHeader.getStatus());

        Cores<Summary> cores = AssertionUtil.assertNotNull(response.getCores());
        Summary summary = AssertionUtil.assertNotNull(cores.getByCore(defaultCore));

        AssertionUtil.assertType(summary.getByField("Active"), Boolean.class);
        AssertionUtil.assertType(summary.getByField("Number of Searchers"), Integer.class);
        AssertionUtil.assertType(summary.getByField("Last Index TX Commit Time"), Long.class);
        AssertionUtil.assertType(summary.getByField("Last Index TX Commit Date"), String.class);
    }

    @Test
    public void status() {
        CoreAdminApi api = client.getCoreAdminApi();

        StatusResponse response = api.getStatus(
                new StatusRequest()
                    .withCore(defaultCore));
        Assertions.assertNotNull(response);

        ResponseHeader responseHeader = AssertionUtil.assertNotNull(response.getResponseHeader());
        Assertions.assertEquals(0, responseHeader.getStatus());

        Cores<CoreMetadata> cores = AssertionUtil.assertNotNull(response.getCores());
        CoreMetadata core = AssertionUtil.assertNotNull(cores.getByCore(defaultCore));

        Assertions.assertEquals(defaultCore, core.getName());
        Assertions.assertNotNull(core.getStartDateTime());
    }

    @Test
    public void report() {
        CoreAdminApi api = client.getCoreAdminApi();

        ReportResponse response = api.getReport(
                new ReportRequest()
                    .withCore(defaultCore));
        Assertions.assertNotNull(response);

        ResponseHeader responseHeader = AssertionUtil.assertNotNull(response.getResponseHeader());
        Assertions.assertEquals(0, responseHeader.getStatus());

        Cores<Report> cores = AssertionUtil.assertNotNull(response.getCores());
        Report report = AssertionUtil.assertNotNull(cores.getByCore(defaultCore));

        AssertionUtil.assertType(report.getByField("Index error count"), Integer.class);
        AssertionUtil.assertType(report.getByField("Last indexed transaction commit time"), Long.class);
        AssertionUtil.assertType(report.getByField("Last indexed transaction commit date"), String.class);
    }

}
asie-api/src/test/resources/log4j2-test.properties (new file, 23 lines)
@@ -0,0 +1,23 @@
rootLogger.level=trace
rootLogger.appenderRef.stdout.ref=STDOUT

logger.inteligr8-rs-request.name=jaxrs.request
logger.inteligr8-rs-request.level=trace
logger.inteligr8-rs-response.name=jaxrs.response
logger.inteligr8-rs-response.level=off

logger.this.name=com.inteligr8.alfresco.asie
logger.this.level=trace

# hide framework
logger.apache-http.name=org.apache.http
logger.apache-http.level=debug
logger.jersey.name=org.glassfish.jersey
logger.jersey.level=trace
logger.jersey-client.name=org.glassfish.jersey.client
logger.jersey-client.level=trace

appender.stdout.type=Console
appender.stdout.name=STDOUT
appender.stdout.layout.type=PatternLayout
appender.stdout.layout.pattern=%d{ABSOLUTE_MICROS} %level{length=1} %c{1}: %m%n
community-module/.gitignore (new file, vendored, 12 lines)
@@ -0,0 +1,12 @@
# Maven
target
pom.xml.versionsBackup

# Eclipse
.project
.classpath
.settings
.vscode

# IDEA
/.idea/
community-module/README.md (new file, 1 line)
@@ -0,0 +1 @@
# ASIE Platform Module Library
community-module/metadata.keystore (new binary file, not shown)
community-module/pom.xml (new file, 106 lines)
@@ -0,0 +1,106 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>com.inteligr8.alfresco</groupId>
        <artifactId>asie-platform-module-parent</artifactId>
        <version>1.3-SNAPSHOT</version>
        <relativePath>../</relativePath>
    </parent>

    <artifactId>asie-community-platform-module</artifactId>
    <packaging>jar</packaging>

    <name>ASIE Platform Module for ACS Community</name>

    <properties>
        <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
        <alfresco.platform.version>23.3.0</alfresco.platform.version>
        <alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
        <tomcat-rad.version>10-2.1</tomcat-rad.version>

        <beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
    </properties>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.alfresco</groupId>
                <artifactId>acs-community-packaging</artifactId>
                <version>${alfresco.platform.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>com.inteligr8.alfresco</groupId>
            <artifactId>cachext-platform-module</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.inteligr8.alfresco</groupId>
            <artifactId>asie-shared</artifactId>
            <version>${project.version}</version>
        </dependency>

        <!-- Needed by this module, but provided by ACS -->
        <dependency>
            <groupId>org.alfresco</groupId>
            <artifactId>alfresco-repository</artifactId>
            <scope>provided</scope>
        </dependency>

        <!-- Alfresco Modules required to use this module -->
        <dependency>
            <groupId>com.inteligr8.alfresco</groupId>
            <artifactId>cxf-jaxrs-platform-module</artifactId>
            <version>1.3.1-acs-v23.3</version>
            <type>amp</type>
        </dependency>

        <!-- Including for testing purposes only -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-core</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>io.repaint.maven</groupId>
                <artifactId>tiles-maven-plugin</artifactId>
                <version>2.40</version>
                <extensions>true</extensions>
                <configuration>
                    <tiles>
                        <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-search-rad-tile -->
                        <tile>com.inteligr8.ootbee:beedk-acs-search-rad-tile:[1.1.6,2.0.0)</tile>
                        <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-self-rad-tile -->
                        <tile>com.inteligr8.ootbee:beedk-acs-platform-self-rad-tile:[1.1.6,2.0.0)</tile>
                        <!-- Documentation: https://bitbucket.org/inteligr8/ootbee-beedk/src/stable/beedk-acs-platform-module-tile -->
                        <tile>com.inteligr8.ootbee:beedk-acs-platform-module-tile:[1.1.6,2.0.0)</tile>
                    </tiles>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <repositories>
        <repository>
            <id>alfresco-public</id>
            <url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
        </repository>
    </repositories>
</project>
community-module/rad.ps1 (new file, 74 lines)
@@ -0,0 +1,74 @@

function discoverArtifactId {
    $script:ARTIFACT_ID=(mvn -q -Dexpression=project"."artifactId -DforceStdout help:evaluate)
}

function rebuild {
    echo "Rebuilding project ..."
    mvn process-classes
}

function start_ {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad process-classes
}

function start_log {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad "-Ddocker.showLogs" process-classes
}

function stop_ {
    discoverArtifactId
    echo "Stopping Docker containers that supported rapid application development ..."
    docker container ls --filter name=${ARTIFACT_ID}-*
    echo "Stopping containers ..."
    docker container stop (docker container ls -q --filter name=${ARTIFACT_ID}-*)
    echo "Removing containers ..."
    docker container rm (docker container ls -aq --filter name=${ARTIFACT_ID}-*)
}

function tail_logs {
    param (
        $container
    )

    discoverArtifactId
    docker container logs -f (docker container ls -q --filter name=${ARTIFACT_ID}-${container})
}

function list {
    discoverArtifactId
    docker container ls --filter name=${ARTIFACT_ID}-*
}

switch ($args[0]) {
    "start" {
        start_
    }
    "start_log" {
        start_log
    }
    "stop" {
        stop_
    }
    "restart" {
        stop_
        start_
    }
    "rebuild" {
        rebuild
    }
    "tail" {
        tail_logs $args[1]
    }
    "containers" {
        list
    }
    default {
        echo "Usage: .\rad.ps1 [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
    }
}

echo "Completed!"
community-module/rad.sh (new file, 71 lines)
@@ -0,0 +1,71 @@
#!/bin/sh

discoverArtifactId() {
    ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
}

rebuild() {
    echo "Rebuilding project ..."
    mvn process-test-classes
}

start() {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad process-test-classes
}

start_log() {
    echo "Rebuilding project and starting Docker containers to support rapid application development ..."
    mvn -Drad -Ddocker.showLogs process-test-classes
}

stop() {
    discoverArtifactId
    echo "Stopping Docker containers that supported rapid application development ..."
    docker container ls --filter name=${ARTIFACT_ID}-*
    echo "Stopping containers ..."
    docker container stop `docker container ls -q --filter name=${ARTIFACT_ID}-*`
    echo "Removing containers ..."
    docker container rm `docker container ls -aq --filter name=${ARTIFACT_ID}-*`
}

tail_logs() {
    discoverArtifactId
    docker container logs -f `docker container ls -q --filter name=${ARTIFACT_ID}-$1`
}

list() {
    discoverArtifactId
    docker container ls --filter name=${ARTIFACT_ID}-*
}

case "$1" in
    start)
        start
        ;;
    start_log)
        start_log
        ;;
    stop)
        stop
        ;;
    restart)
        stop
        start
        ;;
    rebuild)
        rebuild
        ;;
    tail)
        tail_logs $2
        ;;
    containers)
        list
        ;;
    *)
        echo "Usage: ./rad.sh [ start | start_log | stop | restart | rebuild | tail {container} | containers ]"
        exit 1
esac

echo "Completed!"
@@ -0,0 +1,23 @@
package com.inteligr8.alfresco.asie;

public interface CommunityConstants extends Constants {

    static final String BEAN_SHARDSETS_CACHE = "asieShardsetsCache";
    static final String BEAN_NODES_CACHE = "asieNodesCache";
    static final String BEAN_SHARD_NODES_CACHE = "asieShardNodesCache";
    static final String BEAN_SHARDINST_STATE_CACHE = "asieShardInstanceStateCache";
    static final String BEAN_NODE_DISABLE_CACHE = "asieNodeDisabledCache";
    static final String BEAN_NODE_UNAVAIL_CACHE = "asieNodeUnavailableCache";
    static final String BEAN_SHARDINST_DISABLE_CACHE = "asieShardInstanceDisabledCache";
    static final String BEAN_SHARDINST_UNAVAIL_CACHE = "asieShardInstanceUnavailableCache";
    static final String BEAN_CORE_EXPLICIT_CACHE = "asieCoreExplicitCache";

    static final String ATTR_ASIE_SHARDSET = "inteligr8.asie.shardSet";
    static final String ATTR_ASIE_NODE = "inteligr8.asie.node";
    static final String ATTR_ASIE_SHARD_NODES = "inteligr8.asie.shard.nodes";
    static final String ATTR_ASIE_SHARD_NODE = "inteligr8.asie.shard.node";
    static final String ATTR_OBJECT = "object";
    static final String ATTR_DISABLE = "disabled";
    static final String ATTR_NODES = "nodes";

}
@@ -0,0 +1,50 @@
package com.inteligr8.alfresco.asie.compute;

import java.util.List;
import java.util.Set;

import org.alfresco.repo.search.impl.parsers.CMISLexer;
import org.alfresco.repo.search.impl.parsers.CMISParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.springframework.stereotype.Component;

@Component
public class CmisQueryInspector implements QueryInspector {

    private Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_CMIS_ALFRESCO,
            SearchService.LANGUAGE_CMIS_STRICT,
            SearchService.LANGUAGE_INDEX_CMIS,
            SearchService.LANGUAGE_SOLR_CMIS);

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    @Override
    public List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        Tree tree = this.parseCmis(query, defaultOperator);
        throw new UnsupportedOperationException();
    }

    protected Tree parseCmis(String cmisQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(cmisQuery);
        CMISLexer lexer = new CMISLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        CMISParser parser = new CMISParser(tokens);
        CommonTree tree = (CommonTree) parser.query().getTree();
        return tree;
    }

}
@@ -0,0 +1,290 @@
|
||||
package com.inteligr8.alfresco.asie.compute;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.Period;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.search.impl.parsers.FTSLexer;
|
||||
import org.alfresco.repo.search.impl.parsers.FTSParser;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.AssociationRef;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.search.SearchParameters.Operator;
|
||||
import org.alfresco.service.cmr.search.SearchService;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.antlr.runtime.ANTLRStringStream;
|
||||
import org.antlr.runtime.CharStream;
|
||||
import org.antlr.runtime.CommonTokenStream;
|
||||
import org.antlr.runtime.RecognitionException;
|
||||
import org.antlr.runtime.tree.CommonTree;
|
||||
import org.antlr.runtime.tree.Tree;
|
||||
import org.apache.commons.collections4.SetUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@Component
|
||||
public class FtsQueryInspector implements QueryInspector {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(FtsQueryInspector.class);
|
||||
|
||||
private final Set<String> supportedLanguages = SetUtils.unmodifiableSet(
|
||||
SearchService.LANGUAGE_FTS_ALFRESCO,
|
||||
SearchService.LANGUAGE_INDEX_FTS_ALFRESCO,
|
||||
SearchService.LANGUAGE_SOLR_FTS_ALFRESCO,
|
||||
SearchService.LANGUAGE_LUCENE);
|
||||
|
||||
@Autowired
|
||||
private NamespaceService namespaceService;
|
||||
|
||||
@Override
|
||||
public Set<String> getSupportedLanguages() {
|
||||
return this.supportedLanguages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<QueryValue> findRequiredPropertyValues(String ftsQuery, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
|
||||
Tree tree = this.parseFts(ftsQuery, defaultOperator);
|
||||
tree = this.bypassSingleTermDisjunctions(tree);
|
||||
if (tree == null)
|
||||
return null;
|
||||
|
||||
Collection<Tree> trees = this.extractRequiredTerms(tree);
|
||||
this.logger.trace("Found {} required terms in query: {}", trees.size(), ftsQuery);
|
||||
this.filterPropertyTerms(trees, property);
|
||||
this.logger.trace("Found {} required terms for property {} in query: {}", trees.size(), property, ftsQuery);
|
||||
this.filterOutFuzzyTerms(trees);
|
||||
this.logger.trace("Found {} required definitive terms for property {} in query: {}", trees.size(), property, ftsQuery);
|
||||
|
||||
List<QueryValue> values = new ArrayList<>(trees.size());
|
||||
for (Tree t : trees)
|
||||
values.add(this.extractValue(t, dataTypeDef));
|
||||
return values;
|
||||
}
|
||||
|
||||
protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException {
|
||||
CharStream cs = new ANTLRStringStream(ftsQuery);
|
||||
FTSLexer lexer = new FTSLexer(cs);
|
||||
CommonTokenStream tokens = new CommonTokenStream(lexer);
|
||||
FTSParser parser = new FTSParser(tokens);
|
||||
parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
|
||||
parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
|
||||
CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
|
||||
return tree;
|
||||
}
|
||||
|
||||
protected Tree bypassSingleTermDisjunctions(Tree tree) {
|
||||
while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
|
||||
tree = tree.getChild(0);
|
||||
if ("DISJUNCTION".equals(tree.getText()))
|
||||
return null;
|
||||
return tree;
|
||||
}
|
||||
|
||||
protected Collection<Tree> extractRequiredTerms(Tree tree) {
|
||||
while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
|
||||
tree = tree.getChild(0);
|
||||
|
||||
List<Tree> terms = new LinkedList<>();
|
||||
|
||||
switch (tree.getText()) {
|
||||
case "DISJUNCTION":
|
||||
break;
|
||||
case "CONJUNCTION":
|
||||
for (int c = 0; c < tree.getChildCount(); c++) {
|
||||
Collection<Tree> subtrees = this.extractRequiredTerms(tree.getChild(c));
|
||||
if (subtrees == null || subtrees.isEmpty())
|
||||
continue;
|
||||
terms.addAll(subtrees);
|
||||
}
|
||||
break;
|
||||
case "DEFAULT":
|
||||
terms.add(tree);
|
||||
break;
|
||||
default:
|
||||
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
|
||||
}
|
||||
|
||||
return terms;
|
||||
}
|
||||
|
||||
protected Collection<Tree> filterPropertyTerms(Collection<Tree> trees, QName property) {
|
||||
if (trees.isEmpty())
|
||||
return trees;
|
||||
|
||||
Set<String> prefixes = new HashSet<>(this.namespaceService.getPrefixes(property.getNamespaceURI()));
|
||||
if (prefixes.isEmpty()) {
|
||||
this.logger.warn("Unexpected/unsupported namespace: {}", property.getNamespaceURI());
|
||||
trees.clear();
|
||||
return trees;
|
||||
}
|
||||
|
||||
Iterator<Tree> i = trees.iterator();
|
||||
|
||||
while (i.hasNext()) {
|
||||
Tree tree = i.next();
|
||||
|
||||
if ("DEFAULT".equals(tree.getText()))
|
||||
tree = tree.getChild(0);
|
||||
|
||||
int skip = -1;
|
||||
switch (tree.getText()) {
|
||||
case "TERM":
|
||||
case "PHRASE":
|
||||
case "EXACT_TERM":
|
||||
case "EXACT_PHRASE":
|
||||
skip = 1; // skip the value child
|
||||
break;
|
||||
case "RANGE":
|
||||
skip = 4; // skip the inclusive, start, end, inclusive children
|
||||
break;
|
||||
default:
|
||||
}
|
||||
|
||||
if (skip >= 0) {
|
||||
Tree fieldRef = tree.getChild(skip);
|
||||
if (!"FIELD_REF".equals(fieldRef.getText())) {
|
||||
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
|
||||
} else if (!fieldRef.getChild(0).getText().equals(property.getLocalName())) {
|
||||
this.logger.trace("Found but ignoring property: {}", fieldRef.getChild(0).getText());
|
||||
} else {
|
||||
Tree prefix = fieldRef.getChild(1);
|
||||
if (!"PREFIX".equals(prefix.getText())) {
|
||||
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
|
||||
} else if (!prefixes.contains(prefix.getChild(0).getText())) {
|
||||
this.logger.trace("Found but ignoring property: {}:{}", prefix.getChild(0).getText(), property.getLocalName());
|
||||
} else {
|
||||
// this will skip the remove()
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
i.remove();
|
||||
}
|
||||
|
||||
return trees;
|
||||
}
|
||||
|
||||
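// keep only exact terms, exact phrases, and ranges; tokenized (fuzzy) terms are removed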
protected Collection<Tree> filterOutFuzzyTerms(Collection<Tree> trees) {
|
||||
if (trees.isEmpty())
|
||||
return trees;
|
||||
|
||||
Iterator<Tree> i = trees.iterator();
|
||||
|
||||
while (i.hasNext()) {
|
||||
Tree tree = i.next();
|
||||
|
||||
if ("DEFAULT".equals(tree.getText()))
|
||||
tree = tree.getChild(0);
|
||||
|
||||
switch (tree.getText()) {
|
||||
case "EXACT_TERM":
|
||||
case "EXACT_PHRASE":
|
||||
case "RANGE":
|
||||
break;
|
||||
default:
|
||||
i.remove();
|
||||
}
|
||||
}
|
||||
|
||||
return trees;
|
||||
}
|
||||
|
||||
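// convert a term's text into a value typed according to the property's data type definition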
protected QueryValue extractValue(Tree tree, DataTypeDefinition dataTypeDef) {
|
||||
if ("DEFAULT".equals(tree.getText()))
|
||||
tree = tree.getChild(0);
|
||||
|
||||
switch (tree.getText()) {
|
||||
case "RANGE":
|
||||
return this.extractRangeValue(tree, dataTypeDef);
|
||||
default:
|
||||
}
|
||||
|
||||
String value = this.unquote(tree.getChild(0).getText());
|
||||
|
||||
switch (dataTypeDef.getName().getLocalName()) {
|
||||
case "boolean":
|
||||
return new QuerySingleValue<Boolean>(Boolean.parseBoolean(value));
|
||||
case "double":
|
||||
return new QuerySingleValue<Double>(Double.parseDouble(value));
|
||||
case "float":
|
||||
return new QuerySingleValue<Float>(Float.parseFloat(value));
|
||||
case "int":
|
||||
return new QuerySingleValue<Integer>(Integer.parseInt(value));
|
||||
case "long":
|
||||
return new QuerySingleValue<Long>(Long.parseLong(value));
|
||||
case "date":
|
||||
return new QuerySingleValue<LocalDate>(this.evaluateAsDate(value));
|
||||
case "datetime":
|
||||
return new QuerySingleValue<LocalDateTime>(this.evaluateAsDateTime(value));
|
||||
case "period":
|
||||
return new QuerySingleValue<Period>(Period.parse(value));
|
||||
case "qname":
|
||||
return new QuerySingleValue<QName>(QName.createQName(value, this.namespaceService));
|
||||
case "noderef":
|
||||
return new QuerySingleValue<NodeRef>(new NodeRef(value));
|
||||
case "childassocref":
|
||||
return new QuerySingleValue<ChildAssociationRef>(new ChildAssociationRef(value));
|
||||
case "assocref":
|
||||
return new QuerySingleValue<AssociationRef>(new AssociationRef(value));
|
||||
case "locale":
|
||||
return new QuerySingleValue<Locale>(new Locale(value));
|
||||
default:
|
||||
return new QuerySingleValue<String>(value);
|
||||
}
|
||||
}
|
||||
|
||||
protected QueryRangeValue<?> extractRangeValue(Tree tree, DataTypeDefinition dataTypeDef) {
|
||||
boolean includeStart = "INCLUSIVE".equals(tree.getChild(0).getText());
|
||||
String start = this.unquote(tree.getChild(1).getText());
|
||||
String end = this.unquote(tree.getChild(2).getText());
|
||||
boolean includeEnd = "INCLUSIVE".equals(tree.getChild(3).getText());
|
||||
|
||||
switch (dataTypeDef.getName().getLocalName()) {
|
||||
case "double":
|
||||
return new QueryRangeValue<Double>(includeStart, Double.parseDouble(start), includeEnd, Double.parseDouble(end));
|
||||
case "float":
|
||||
return new QueryRangeValue<Float>(includeStart, Float.parseFloat(start), includeEnd, Float.parseFloat(end));
|
||||
case "int":
|
||||
return new QueryRangeValue<Integer>(includeStart, Integer.parseInt(start), includeEnd, Integer.parseInt(end));
|
||||
case "long":
|
||||
return new QueryRangeValue<Long>(includeStart, Long.parseLong(start), includeEnd, Long.parseLong(end));
|
||||
case "date":
|
||||
return new QueryRangeValue<LocalDate>(includeStart, this.evaluateAsDate(start), includeEnd, this.evaluateAsDate(end));
|
||||
case "datetime":
|
||||
return new QueryRangeValue<LocalDateTime>(includeStart, this.evaluateAsDateTime(start), includeEnd, this.evaluateAsDateTime(end));
|
||||
default:
|
||||
throw new UnsupportedOperationException("The data type does not make sense for range evaluation: " + dataTypeDef.getName());
|
||||
}
|
||||
}
|
||||
|
||||
protected LocalDate evaluateAsDate(String str) {
|
||||
if ("now".equalsIgnoreCase(str)) return LocalDate.now();
|
||||
else return LocalDate.parse(str);
|
||||
}
|
||||
|
||||
protected LocalDateTime evaluateAsDateTime(String str) {
|
||||
if ("now".equalsIgnoreCase(str)) return LocalDateTime.now();
|
||||
else return LocalDateTime.parse(str);
|
||||
}
|
||||
|
||||
protected String unquote(String str) {
|
||||
if (str.length() < 2) return str;
|
||||
else if (str.charAt(0) == '\'' && str.charAt(str.length()-1) == '\'') return str.substring(1, str.length()-1);
|
||||
else if (str.charAt(0) == '\"' && str.charAt(str.length()-1) == '\"') return str.substring(1, str.length()-1);
|
||||
else return str;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,74 @@
|
||||
package com.inteligr8.alfresco.asie.compute;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.search.SearchParameters.Operator;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.antlr.runtime.RecognitionException;
|
||||
|
||||
public interface QueryInspector {
|
||||
|
||||
Set<String> getSupportedLanguages();
|
||||
|
||||
List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException;
|
||||
|
||||
|
||||
|
||||
public interface QueryValue {
|
||||
|
||||
}
|
||||
|
||||
public class QuerySingleValue<T> implements QueryValue {
|
||||
|
||||
private T value;
|
||||
|
||||
public QuerySingleValue(T value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public T getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.value.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class QueryRangeValue<T> implements QueryValue {
|
||||
|
||||
private boolean includeStart;
|
||||
private T start;
|
||||
private boolean includeEnd;
|
||||
private T end;
|
||||
|
||||
public QueryRangeValue(boolean includeStart, T start, boolean includeEnd, T end) {
|
||||
this.includeStart = includeStart;
|
||||
this.start = start;
|
||||
this.includeEnd = includeEnd;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public boolean isIncludeStart() {
|
||||
return includeStart;
|
||||
}
|
||||
|
||||
public boolean isIncludeEnd() {
|
||||
return includeEnd;
|
||||
}
|
||||
|
||||
public T getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public T getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,32 @@
|
||||
package com.inteligr8.alfresco.asie.compute;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.service.cmr.search.SearchParameters;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@Component
|
||||
public class QueryInspectorFactory implements InitializingBean {
|
||||
|
||||
@Autowired
|
||||
private List<QueryInspector> inspectors;
|
||||
|
||||
private Map<String, QueryInspector> languageInspectorMap = new HashMap<>();
|
||||
|
||||
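// index each registered QueryInspector by the query languages it supports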
@Override
|
||||
public void afterPropertiesSet() throws Exception {
|
||||
for (QueryInspector inspector : this.inspectors) {
|
||||
for (String language : inspector.getSupportedLanguages())
|
||||
this.languageInspectorMap.put(language, inspector);
|
||||
}
|
||||
}
|
||||
|
||||
public QueryInspector selectQueryInspector(SearchParameters searchParams) {
|
||||
return this.languageInspectorMap.get(searchParams.getLanguage());
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,28 @@
|
||||
package com.inteligr8.alfresco.asie.provider;
|
||||
|
||||
import org.alfresco.repo.index.shard.ShardRegistry;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Scope;
|
||||
|
||||
import com.inteligr8.alfresco.asie.Constants;
|
||||
|
||||
@Configuration
|
||||
public class ShardRegistryProvider extends AbstractProvider<ShardRegistry> {
|
||||
|
||||
/**
|
||||
* This allows for the selection of the primary or first ShardRegistry
|
||||
* registered in the Spring BeanFactory.
|
||||
*
|
||||
* @return A ShardRegistry.
|
||||
*/
|
||||
@Bean(Constants.BEAN_SHARD_REGISTRY)
|
||||
@Qualifier(Constants.QUALIFIER_ASIE)
|
||||
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
|
||||
public ShardRegistry selectBean() {
|
||||
return this.getPrimary(ShardRegistry.class);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,240 @@
|
||||
package com.inteligr8.alfresco.asie.service;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.cache.SimpleCache;
|
||||
import org.alfresco.repo.index.shard.ShardMethodEnum;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.collections.CollectionUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import com.inteligr8.alfresco.asie.CommunityConstants;
|
||||
import com.inteligr8.alfresco.asie.model.Shard;
|
||||
import com.inteligr8.alfresco.asie.model.ShardInstance;
|
||||
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
|
||||
import com.inteligr8.alfresco.asie.model.ShardSet;
|
||||
import com.inteligr8.alfresco.asie.model.SolrHost;
|
||||
import com.inteligr8.alfresco.cachext.CollectionCache;
|
||||
import com.inteligr8.alfresco.cachext.MultiValueCache;
|
||||
|
||||
@Component
|
||||
public class ShardDiscoveryService implements com.inteligr8.alfresco.asie.spi.ShardDiscoveryService {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(this.getClass());
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
|
||||
private SimpleCache<String, ShardSet> shardsetsCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODES_CACHE)
|
||||
private SimpleCache<String, SolrHost> nodesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
|
||||
private MultiValueCache<Shard, SolrHost> shardNodesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
|
||||
private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
|
||||
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
|
||||
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
|
||||
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
|
||||
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
|
||||
private SimpleCache<String, QName> coreExplicitIdCache;
|
||||
|
||||
@Override
|
||||
public ShardSet findSetByCore(String core) {
|
||||
return this.shardsetsCache.get(core);
|
||||
}
|
||||
|
||||
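// match a registered node by hostname (directly or via DNS resolution) and by port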
@Override
|
||||
public SolrHost findNode(String nodeHostname, int nodePort) {
|
||||
Map<String, InetAddress> resolvedAddresses = new HashMap<>();
|
||||
|
||||
for (String nodeSpec : this.nodesCache.getKeys()) {
|
||||
SolrHost node = this.nodesCache.get(nodeSpec);
|
||||
|
||||
if (!nodeHostname.equalsIgnoreCase(node.getHostname())) {
|
||||
if (!resolvedAddresses.containsKey(nodeHostname))
|
||||
resolvedAddresses.put(nodeHostname, this.resolve(nodeHostname));
|
||||
InetAddress nodeAddress = resolvedAddresses.get(nodeHostname);
|
||||
this.logger.trace("Resolved: {} => {}", nodeHostname, nodeAddress);
|
||||
if (nodeAddress == null)
|
||||
continue;
|
||||
|
||||
if (!resolvedAddresses.containsKey(node.getHostname()))
|
||||
resolvedAddresses.put(node.getHostname(), this.resolve(node.getHostname()));
|
||||
InetAddress shardInstanceAddress = resolvedAddresses.get(node.getHostname());
|
||||
this.logger.trace("Resolved: {} => {}", node.getHostname(), shardInstanceAddress);
|
||||
if (!nodeAddress.equals(shardInstanceAddress))
|
||||
continue;
|
||||
}
|
||||
|
||||
if (nodePort == node.getPort()) {
|
||||
this.logger.debug("Found node: {}", node);
|
||||
return node;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private InetAddress resolve(String hostname) {
|
||||
try {
|
||||
return InetAddress.getByName(hostname);
|
||||
} catch (UnknownHostException uhe) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ShardSet, Map<Integer, ShardInstanceState>> findByNode(SolrHost node) {
|
||||
Map<ShardSet, Map<Integer, ShardInstanceState>> response = new HashMap<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
ShardSet shardSet = this.shardsetsCache.get(shard.extractShardSetCore());
|
||||
|
||||
if (this.shardNodesCache.contains(shard, node)) {
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
|
||||
|
||||
Map<Integer, ShardInstanceState> shards = response.get(shardSet);
|
||||
if (shards == null)
|
||||
response.put(shardSet, shards = new HashMap<>());
|
||||
shards.put(shard.extractShardId(), state);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<ShardSet> findSetsByShardMethod(ShardMethodEnum... shardMethods) {
|
||||
Set<ShardSet> shardSets = new HashSet<>();
|
||||
|
||||
Set<ShardMethodEnum> methods = CollectionUtils.asSet(shardMethods);
|
||||
for (String core : this.shardsetsCache.getKeys()) {
|
||||
ShardSet shardSet = this.shardsetsCache.get(core);
|
||||
if (methods.contains(shardSet.getMethod()))
|
||||
shardSets.add(shardSet);
|
||||
}
|
||||
|
||||
return shardSets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<SolrHost> findNodes(ShardSet shardSet) {
|
||||
Set<SolrHost> nodes = new HashSet<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (shardSet.getCore().equals(shard.extractShardSetCore()))
|
||||
nodes.addAll(this.shardNodesCache.get(shard));
|
||||
}
|
||||
|
||||
return nodes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<SolrHost> findNodesByShard(ShardSet shardSet, int shardId) {
|
||||
Set<SolrHost> nodes = new HashSet<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (shardSet.getCore().equals(shard.extractShardSetCore()) && shardId == shard.extractShardId())
|
||||
nodes.addAll(this.shardNodesCache.get(shard));
|
||||
}
|
||||
|
||||
return nodes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Integer, Pair<SolrHost, ShardInstanceState>> findLatestNodeStates(ShardSet shardSet) {
|
||||
Map<Integer, Pair<SolrHost, ShardInstanceState>> response = new HashMap<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (!shardSet.getCore().equals(shard.extractShardSetCore()))
|
||||
continue;
|
||||
|
||||
SolrHost latestNode = null;
|
||||
ShardInstanceState latestState = null;
|
||||
|
||||
for (SolrHost node : this.shardNodesCache.get(shard)) {
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
|
||||
if (latestState == null || state.compareTo(latestState) < 0) {
|
||||
latestState = state;
|
||||
latestNode = node;
|
||||
}
|
||||
}
|
||||
|
||||
if (latestNode != null)
|
||||
response.put(shard.extractShardId(), new Pair<>(latestNode, latestState));
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Pair<SolrHost, ShardInstanceState>> findNodeStatesByShard(ShardSet shardSet, int shardId) {
|
||||
List<Pair<SolrHost, ShardInstanceState>> response = new LinkedList<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (!shardSet.getCore().equals(shard.extractShardSetCore()))
|
||||
continue;
|
||||
|
||||
for (SolrHost node : this.shardNodesCache.get(shard)) {
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
|
||||
response.add(new Pair<>(node, state));
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Integer> findIdsByNode(ShardSet shardSet, SolrHost node) {
|
||||
Set<Integer> shardIds = new HashSet<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (shardSet.getCore().equals(shard.extractShardSetCore()) && this.shardNodesCache.contains(shard, node))
|
||||
shardIds.add(shard.extractShardId());
|
||||
}
|
||||
|
||||
return shardIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Integer, ShardInstanceState> findStatesByNode(ShardSet shardSet, SolrHost node) {
|
||||
// assumed semantics: the state of each shard of this set hosted on the given node, mirroring findByNode()
Map<Integer, ShardInstanceState> states = new HashMap<>();
for (Integer shardId : this.findIdsByNode(shardSet, node))
states.put(shardId, this.shardInstanceStatesCache.get(ShardInstance.from(Shard.from(shardSet, shardId), node)));
return states;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,25 @@
package com.inteligr8.alfresco.asie.service;

import org.alfresco.service.cmr.attributes.AttributeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.Constants;

@Component
public class ShardStateService implements com.inteligr8.alfresco.asie.spi.ShardStateService {

@Autowired
@Qualifier(Constants.QUALIFIER_ASIE)
private AttributeService attrService;

@Autowired
private SolrShardRegistry shardRegistry;

@Override
public void clear() {
this.shardRegistry.purge();
}

}
@@ -0,0 +1,628 @@
|
||||
package com.inteligr8.alfresco.asie.service;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.Random;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.cache.SimpleCache;
|
||||
import org.alfresco.repo.index.shard.Floc;
|
||||
import org.alfresco.repo.index.shard.ShardState;
|
||||
import org.alfresco.repo.lock.JobLockService;
|
||||
import org.alfresco.service.cmr.attributes.AttributeService;
|
||||
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||
import org.alfresco.service.cmr.search.SearchParameters;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.antlr.runtime.RecognitionException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.extensions.surf.util.AbstractLifecycleBean;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import com.inteligr8.alfresco.asie.CommunityConstants;
|
||||
import com.inteligr8.alfresco.asie.Constants;
|
||||
import com.inteligr8.alfresco.asie.compute.QueryInspector;
|
||||
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryRangeValue;
|
||||
import com.inteligr8.alfresco.asie.compute.QueryInspector.QuerySingleValue;
|
||||
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryValue;
|
||||
import com.inteligr8.alfresco.asie.compute.QueryInspectorFactory;
|
||||
import com.inteligr8.alfresco.asie.model.Shard;
|
||||
import com.inteligr8.alfresco.asie.model.ShardInstance;
|
||||
import com.inteligr8.alfresco.asie.model.ShardInstanceState;
|
||||
import com.inteligr8.alfresco.asie.model.ShardSet;
|
||||
import com.inteligr8.alfresco.asie.model.SolrHost;
|
||||
import com.inteligr8.alfresco.asie.spi.ShardRegistry;
|
||||
import com.inteligr8.alfresco.cachext.CollectionCache;
|
||||
import com.inteligr8.alfresco.cachext.MultiValueCache;
|
||||
|
||||
@Component
|
||||
@Primary
|
||||
public class SolrShardRegistry extends AbstractLifecycleBean implements ShardRegistry {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(this.getClass());
|
||||
private final Random random = new Random();
|
||||
private final QName shardLock = QName.createQName(Constants.NAMESPACE_ASIE, "shardLock");
|
||||
|
||||
@Autowired
|
||||
@Qualifier(Constants.QUALIFIER_ASIE)
|
||||
private AttributeService attrService;
|
||||
|
||||
@Autowired
|
||||
private NamespaceService namespaceService;
|
||||
|
||||
@Autowired
|
||||
private DictionaryService dictionaryService;
|
||||
|
||||
@Autowired
|
||||
private QueryInspectorFactory queryInspectorFactory;
|
||||
|
||||
@Autowired
|
||||
private JobLockService jobLockService;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDSETS_CACHE)
|
||||
private SimpleCache<String, ShardSet> shardsetsCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODES_CACHE)
|
||||
private SimpleCache<String, SolrHost> nodesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARD_NODES_CACHE)
|
||||
private MultiValueCache<Shard, SolrHost> shardNodesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_STATE_CACHE)
|
||||
private SimpleCache<ShardInstance, ShardInstanceState> shardInstanceStatesCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODE_UNAVAIL_CACHE)
|
||||
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeUnavailableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_NODE_DISABLE_CACHE)
|
||||
private CollectionCache<SolrHost, HashSet<SolrHost>> nodeDisableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_UNAVAIL_CACHE)
|
||||
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceUnavailableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_SHARDINST_DISABLE_CACHE)
|
||||
private CollectionCache<ShardInstance, HashSet<ShardInstance>> shardInstanceDisableCache;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(CommunityConstants.BEAN_CORE_EXPLICIT_CACHE)
|
||||
private SimpleCache<String, QName> coreExplicitIdCache;
|
||||
|
||||
@Value("${inteligr8.asie.registerUnknownShardDisabled}")
|
||||
private boolean registerDisabled;
|
||||
|
||||
@Value("${inteligr8.asie.offlineIdleShardInSeconds}")
|
||||
private int offlineIdleShardInSeconds;
|
||||
|
||||
@Value("${inteligr8.asie.forgetOfflineShardInSeconds}")
|
||||
private int forgetOfflineShardInSeconds;
|
||||
|
||||
@Override
|
||||
protected void onBootstrap(ApplicationEvent event) {
|
||||
this.loadPersistedToCache();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onShutdown(ApplicationEvent event) {
|
||||
}
|
||||
|
||||
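// rebuild the in-memory registry caches from attributes persisted via the AttributeService, under a cluster-wide job lock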
protected void loadPersistedToCache() {
|
||||
String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
|
||||
try {
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
String core = (String) keys[1];
|
||||
if (!shardsetsCache.contains(core)) {
|
||||
ShardSet shardSet = (ShardSet) value;
|
||||
shardsetsCache.put(core, shardSet);
|
||||
|
||||
switch (shardSet.getMethod()) {
|
||||
case EXPLICIT_ID:
|
||||
cacheExplicitShard(shardSet, false);
|
||||
break;
|
||||
default:
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_SHARDSET);
|
||||
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
String nodeSpec = (String) keys[2];
|
||||
SolrHost node = (SolrHost) value;
|
||||
if (!nodesCache.contains(nodeSpec))
|
||||
nodesCache.put(nodeSpec, node);
|
||||
if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, nodeSpec))) {
|
||||
if (!nodeDisableCache.contains(node))
|
||||
nodeDisableCache.add(node);
|
||||
} else if (nodeDisableCache.contains(node)) {
|
||||
nodeDisableCache.remove(node);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT);
|
||||
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
Shard shard = (Shard) keys[1];
|
||||
SolrHost node = (SolrHost) value;
|
||||
if (!shardNodesCache.contains(shard, node))
|
||||
shardNodesCache.add(shard, node);
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_SHARD_NODES);
|
||||
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
ShardInstance shardNode = (ShardInstance) keys[2];
|
||||
ShardInstanceState state = (ShardInstanceState) value;
|
||||
if (!shardInstanceStatesCache.contains(shardNode))
|
||||
shardInstanceStatesCache.put(shardNode, state);
|
||||
if (Boolean.TRUE.equals(attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode))) {
|
||||
if (!shardInstanceDisableCache.contains(shardNode))
|
||||
shardInstanceDisableCache.add(shardNode);
|
||||
} else if (shardInstanceDisableCache.contains(shardNode)) {
|
||||
shardInstanceDisableCache.remove(shardNode);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT);
|
||||
} finally {
|
||||
this.jobLockService.releaseLock(lockId, this.shardLock);
|
||||
}
|
||||
}
|
||||
|
||||
private void cacheExplicitShard(ShardSet shardSet, boolean overwrite) {
|
||||
if (overwrite || !this.coreExplicitIdCache.contains(shardSet.getCore())) {
|
||||
String property = shardSet.getPrefixedProperty();
|
||||
QName propertyQName = QName.createQName(property, namespaceService);
|
||||
|
||||
this.logger.debug("Mapping core to explicit ID: {} => {}", shardSet.getCore(), propertyQName);
|
||||
this.coreExplicitIdCache.put(shardSet.getCore(), propertyQName);
|
||||
}
|
||||
}
|
||||
|
||||
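// write the in-memory registry caches back to the AttributeService, under the shard job lock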
protected void persistCache() {
|
||||
String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
|
||||
try {
|
||||
this.persistShardSetCache();
|
||||
this.persistNodeCache();
|
||||
this.persistShardNodesCache();
|
||||
this.persistShardInstanceCache();
|
||||
} finally {
|
||||
this.jobLockService.releaseLock(lockId, this.shardLock);
|
||||
}
|
||||
}
|
||||
|
||||
private void persistShardSetCache() {
|
||||
// add anything missing
|
||||
// update anything changed
|
||||
for (String core : this.shardsetsCache.getKeys()) {
|
||||
ShardSet shardSet = this.shardsetsCache.get(core);
|
||||
this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, core);
|
||||
}
|
||||
|
||||
// we are not removing anything removed from the cache, as it might have expired
|
||||
// it will just recache on the next load
|
||||
}
|
||||
|
||||
private void persistNodeCache() {
|
||||
// add anything missing
|
||||
// update anything changed
|
||||
for (String nodeSpec : this.nodesCache.getKeys()) {
|
||||
SolrHost node = this.nodesCache.get(nodeSpec);
|
||||
this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, nodeSpec);
|
||||
}
|
||||
|
||||
// we are not removing anything removed from the cache, as it might have expired
|
||||
// it will just recache on the next load
|
||||
|
||||
// add anything disabled
|
||||
for (SolrHost node : this.nodeDisableCache.values())
|
||||
this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE, node.getSpec());
|
||||
|
||||
// remove anything not disabled
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
SolrHost node = SolrHost.from((String) keys[2]);
|
||||
if (!nodeDisableCache.contains(node))
|
||||
attrService.removeAttribute(keys);
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_DISABLE);
|
||||
}
|
||||
|
||||
private void persistShardNodesCache() {
|
||||
// add anything missing
|
||||
// update anything changed
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
Collection<SolrHost> nodes = this.shardNodesCache.get(shard);
|
||||
for (SolrHost node : nodes) {
|
||||
this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
|
||||
}
|
||||
}
|
||||
|
||||
// we are not removing anything removed from the cache, as it might have expired
|
||||
// it will just recache on the next load
|
||||
}
|
||||
|
||||
private void persistShardInstanceCache() {
|
||||
// add anything missing
|
||||
// update anything changed
|
||||
for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
|
||||
ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
|
||||
this.checkSetAttribute(state, shardNode);
|
||||
}
|
||||
|
||||
// we are not removing anything removed from the cache, as it might have expired
|
||||
// it will just recache on the next load
|
||||
|
||||
// add anything disabled
|
||||
for (ShardInstance shardNode : this.shardInstanceDisableCache.values())
|
||||
this.checkSetAttribute(Boolean.TRUE, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE, shardNode);
|
||||
|
||||
// remove anything not disabled
|
||||
this.attrService.getAttributes(new AttributeQueryCallback() {
|
||||
@Override
|
||||
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
|
||||
ShardInstance shardNode = (ShardInstance) keys[2];
|
||||
if (!shardInstanceDisableCache.contains(shardNode))
|
||||
attrService.removeAttribute(keys);
|
||||
return true;
|
||||
}
|
||||
}, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_DISABLE);
|
||||
}
|
||||
|
||||
private void checkSetAttribute(ShardInstanceState state, ShardInstance shardNode) {
|
||||
ShardInstanceState currentState = (ShardInstanceState) this.attrService.getAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
|
||||
if (currentState != null) {
|
||||
if (currentState.compareTo(state) >= 0) {
|
||||
// current state is older (greater; further down the list)
|
||||
// do nothing
|
||||
} else {
|
||||
this.logger.debug("The persisted state was old; updating: {}: {} => {}", shardNode, currentState, state);
|
||||
this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
|
||||
}
|
||||
} else {
|
||||
this.attrService.setAttribute(state, CommunityConstants.ATTR_ASIE_SHARD_NODE, CommunityConstants.ATTR_OBJECT, shardNode);
|
||||
}
|
||||
}
|
||||
|
||||
private void checkSetAttribute(Serializable value, Serializable... keys) {
|
||||
Serializable currentValue = this.attrService.getAttribute(keys);
|
||||
if (currentValue != null) {
|
||||
if (currentValue.equals(value))
|
||||
return;
|
||||
this.logger.warn("The attribute value unexpectedly changed: {}: {} => {}", keys, currentValue, value);
|
||||
}
|
||||
|
||||
this.attrService.setAttribute(value, keys);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void registerShardState(ShardState shardNodeState) {
|
||||
ShardSet shardSet = ShardSet.from(shardNodeState.getShardInstance().getShard().getFloc(), shardNodeState);
|
||||
Shard shard = Shard.from(shardSet, shardNodeState.getShardInstance().getShard().getInstance());
|
||||
SolrHost node = SolrHost.from(shardNodeState.getShardInstance());
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
ShardInstanceState state = ShardInstanceState.from(shardNodeState);
|
||||
|
||||
String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
|
||||
try {
|
||||
if (!this.shardsetsCache.contains(shardSet.getCore()))
|
||||
this.shardsetsCache.put(shardSet.getCore(), shardSet);
|
||||
this.checkSetAttribute(shardSet, CommunityConstants.ATTR_ASIE_SHARDSET, shardSet.getCore());
|
||||
|
||||
if (!this.nodesCache.contains(node.getSpec()))
|
||||
this.nodesCache.put(node.getSpec(), node);
|
||||
this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_NODE, CommunityConstants.ATTR_OBJECT, node.getSpec());
|
||||
if (!this.shardNodesCache.contains(shard, node))
|
||||
this.shardNodesCache.add(shard, node);
|
||||
this.checkSetAttribute(node, CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
|
||||
|
||||
ShardInstanceState currentState = this.shardInstanceStatesCache.get(shardNode);
|
||||
if (currentState == null || currentState.compareTo(state) > 0)
|
||||
this.shardInstanceStatesCache.put(shardNode, state);
|
||||
this.checkSetAttribute(state, shardNode);
|
||||
if (this.registerDisabled && !this.shardInstanceDisableCache.contains(shardNode))
|
||||
this.shardInstanceDisableCache.add(shardNode);
|
||||
} finally {
|
||||
this.jobLockService.releaseLock(lockId, this.shardLock);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unregisterShardInstance(org.alfresco.repo.index.shard.ShardInstance shardInstance) {
|
||||
ShardSet shardSet = ShardSet.from(shardInstance.getShard().getFloc(), null);
|
||||
Shard shard = Shard.from(shardSet, shardInstance.getShard().getInstance());
|
||||
SolrHost node = SolrHost.from(shardInstance);
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
|
||||
String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
|
||||
try {
|
||||
this.shardInstanceStatesCache.remove(shardNode);
|
||||
this.shardInstanceDisableCache.remove(shardNode);
|
||||
this.shardInstanceUnavailableCache.remove(shardNode);
|
||||
this.nodeDisableCache.remove(node);
|
||||
this.nodeUnavailableCache.remove(node);
|
||||
this.attrService.removeAttribute(CommunityConstants.ATTR_ASIE_SHARD_NODES, shard, node.getSpec());
|
||||
} finally {
|
||||
this.jobLockService.releaseLock(lockId, this.shardLock);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> getFlocs() {
|
||||
Map<String, Floc> flocs = new HashMap<>();
|
||||
Map<Floc, Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>>> response = new HashMap<>();
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
String core = shard.extractShardSetCore();
|
||||
ShardSet shardSet = this.shardsetsCache.get(core);
|
||||
|
||||
Floc floc = flocs.get(core);
if (floc == null) {
floc = shardSet.toAlfrescoModel();
flocs.put(core, floc);
}

Map<org.alfresco.repo.index.shard.Shard, Set<ShardState>> shards = response.get(floc);
if (shards == null)
shards = new HashMap<>();
|
||||
|
||||
org.alfresco.repo.index.shard.Shard shard_ = shard.toAlfrescoModel(floc);
|
||||
Set<ShardState> states = shards.get(shard_);
|
||||
if (states == null)
|
||||
states = new HashSet<>();
|
||||
|
||||
for (SolrHost node : this.shardNodesCache.get(shard)) {
|
||||
if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node)) {
|
||||
this.logger.debug("Excluding node as it is disabled or considered unavailable: {}", node);
|
||||
continue;
|
||||
}
|
||||
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode)) {
|
||||
this.logger.debug("Excluding shard node as it is disabled or considered unavailable: {}", shardNode);
|
||||
continue;
|
||||
}
|
||||
|
||||
ShardInstanceState shardNodeState = this.shardInstanceStatesCache.get(shardNode);
|
||||
states.add(shardNodeState.toAlfrescoModel(shardNode.toAlfrescoModel(shard_)));
|
||||
}
|
||||
|
||||
if (!states.isEmpty())
|
||||
shards.put(shard_, states);
|
||||
if (!shards.isEmpty())
|
||||
response.put(floc, shards);
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void purge() {
|
||||
String lockId = this.jobLockService.getLock(this.shardLock, 2500L, 100L, 50);
|
||||
try {
|
||||
this.logger.info("Removing all nodes/shards from the shard registry");
|
||||
this.shardsetsCache.clear();
|
||||
this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARDSET);
|
||||
|
||||
this.nodesCache.clear();
|
||||
this.nodeDisableCache.clear();
|
||||
this.nodeUnavailableCache.clear();
|
||||
this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_NODE);
|
||||
|
||||
this.shardNodesCache.clear();
|
||||
this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODES);
|
||||
|
||||
this.shardInstanceStatesCache.clear();
|
||||
this.shardInstanceDisableCache.clear();
|
||||
this.shardInstanceUnavailableCache.clear();
|
||||
this.attrService.removeAttributes(CommunityConstants.ATTR_ASIE_SHARD_NODE);
|
||||
|
||||
this.coreExplicitIdCache.clear();
|
||||
} finally {
|
||||
this.jobLockService.releaseLock(lockId, this.shardLock);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void purgeAgedOutShards() {
|
||||
OffsetDateTime onlineExpired = OffsetDateTime.now().minusSeconds(this.offlineIdleShardInSeconds);
|
||||
OffsetDateTime offlineExpired = OffsetDateTime.now().minusSeconds(this.forgetOfflineShardInSeconds);
|
||||
|
||||
for (ShardInstance shardNode : this.shardInstanceStatesCache.getKeys()) {
|
||||
ShardInstanceState state = this.shardInstanceStatesCache.get(shardNode);
|
||||
SolrHost node = shardNode.extractNode();
|
||||
|
||||
if (this.shardInstanceDisableCache.contains(shardNode)) {
|
||||
this.logger.debug("Ignoring disabled shard instance during purgeAgedOutShards()");
|
||||
} else if (this.nodeDisableCache.contains(node)) {
|
||||
this.logger.debug("Ignoring disabled node during purgeAgedOutShards()");
|
||||
} else if (state.getLastUpdated().isBefore(offlineExpired)) {
|
||||
this.shardInstanceStatesCache.remove(shardNode);
|
||||
if (this.shardInstanceUnavailableCache.remove(shardNode)) {
|
||||
this.logger.info("Forgetting about already offline shard: {}", shardNode);
|
||||
} else if (this.nodeUnavailableCache.remove(node)) {
|
||||
this.logger.info("Forgetting about already offline shard: {}", shardNode);
|
||||
} else {
|
||||
this.logger.warn("Forgetting about online shard: {}", shardNode);
|
||||
}
|
||||
} else if (state.getLastUpdated().isBefore(onlineExpired)) {
|
||||
this.logger.warn("Taking shard offline: {}", shardNode);
|
||||
this.shardInstanceUnavailableCache.add(shardNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public QName getExplicitIdProperty(String coreName) {
|
||||
return this.coreExplicitIdCache.get(coreName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Integer> getShardInstanceList(String coreName) {
|
||||
Set<Integer> shardIds = new HashSet<>();
|
||||
|
||||
ShardSet shardSet = this.shardsetsCache.get(coreName);
|
||||
if (shardSet == null)
|
||||
return Collections.emptySet();
|
||||
|
||||
|
||||
for (Shard shard : this.shardNodesCache.getKeys()) {
|
||||
if (shardSet.getCore().equals(shard.extractShardSetCore())) {
|
||||
shardIds.add(shard.extractShardId());
|
||||
}
|
||||
}
|
||||
|
||||
return shardIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public OptionalInt getShardInstanceByTransactionTimestamp(String coreId, long txnTimestamp) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.alfresco.repo.index.shard.ShardInstance> getIndexSlice(SearchParameters searchParameters) {
|
||||
if (searchParameters.getQuery() == null)
|
||||
return Collections.emptyList();
|
||||
|
||||
List<org.alfresco.repo.index.shard.ShardInstance> bestShards = null;
|
||||
|
||||
for (String shardSetSpec : this.shardsetsCache.getKeys()) {
|
||||
ShardSet shardSet = this.shardsetsCache.get(shardSetSpec);
|
||||
|
||||
Set<Integer> shardIds = this.getIndexSlice(searchParameters, shardSet);
|
||||
if (shardIds == null)
|
||||
continue;
|
||||
|
||||
List<org.alfresco.repo.index.shard.ShardInstance> shards = this.selectRandomNodes(shardSet, shardIds);
|
||||
|
||||
if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
|
||||
bestShards = shards;
|
||||
if (bestShards != null && bestShards.size() == 1)
|
||||
break;
|
||||
}
|
||||
|
||||
return bestShards;
|
||||
}
|
||||
|
||||
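// determine which shard IDs could satisfy the query for this shard set; null means no optimization is possible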
protected Set<Integer> getIndexSlice(SearchParameters searchParameters, ShardSet shardSet) {
|
||||
try {
|
||||
switch (shardSet.getMethod()) {
|
||||
case EXPLICIT_ID:
|
||||
return this.getExplicitIdIndexSlice(searchParameters, shardSet);
|
||||
default:
|
||||
// no optimization available
|
||||
return null;
|
||||
}
|
||||
} catch (RecognitionException re) {
|
||||
this.logger.debug("Failed to parse the query: " + searchParameters.getQuery(), re);
|
||||
// no optimization available
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
protected Set<Integer> getExplicitIdIndexSlice(SearchParameters searchParameters, ShardSet shardSet) throws RecognitionException {
|
||||
this.logger.trace("Found {} shard set, which is the highest priority", shardSet.getMethod());
|
||||
|
||||
QueryInspector inspector = this.queryInspectorFactory.selectQueryInspector(searchParameters);
|
||||
if (inspector == null) {
|
||||
this.logger.debug("The search is using an unsupported query language; unable to optimize for {}: {}", shardSet.getMethod(), searchParameters.getLanguage());
|
||||
return null;
|
||||
}
|
||||
|
||||
String property = shardSet.getPrefixedProperty();
|
||||
QName propertyQName = QName.createQName(property, this.namespaceService);
|
||||
this.logger.trace("Will attempt to see if search has a required constraint on explicit shard ID property: {}", propertyQName);
|
||||
DataTypeDefinition dtdef = this.dictionaryService.getProperty(propertyQName).getDataType();
|
||||
|
||||
Set<Integer> shardIds = new HashSet<>();
|
||||
List<QueryValue> values = inspector.findRequiredPropertyValues(searchParameters.getQuery(), searchParameters.getDefaultOperator(), propertyQName, dtdef);
|
||||
this.logger.trace("Found {} matching terms query: {}: {}", values.size(), propertyQName, searchParameters.getQuery());
|
||||
for (QueryValue value : values) {
|
||||
if (value instanceof QuerySingleValue<?>) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Number num = ((QuerySingleValue<? extends Number>) value).getValue();
|
||||
shardIds.add(num.intValue());
|
||||
} else if (value instanceof QueryRangeValue<?>) {
|
||||
@SuppressWarnings("unchecked")
|
||||
QueryRangeValue<? extends Number> num = (QueryRangeValue<? extends Number>) value;
|
||||
int start = num.getStart().intValue();
|
||||
if (!num.isIncludeStart())
|
||||
start++;
|
||||
int end = num.getEnd().intValue();
|
||||
if (!num.isIncludeEnd())
|
||||
end--;
|
||||
for (int shardId = start; shardId <= end; shardId++)
|
||||
shardIds.add(shardId);
|
||||
}
|
||||
}
|
||||
|
||||
if (shardIds.isEmpty()) {
|
||||
this.logger.trace("The {} shard set cannot not be used to optimize the query", shardSet.getMethod());
|
||||
return null;
|
||||
}
|
||||
this.logger.debug("The {} shard set was used to optimize the query to use only shards: {}", shardSet.getMethod(), shardIds);
|
||||
|
||||
return shardIds;
|
||||
}
|
||||
|
||||
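// for each shard ID, pick one random node that is neither disabled nor marked unavailable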
protected List<org.alfresco.repo.index.shard.ShardInstance> selectRandomNodes(ShardSet shardSet, Collection<Integer> shardIds) {
|
||||
List<org.alfresco.repo.index.shard.ShardInstance> shardNodes = new LinkedList<>();
|
||||
|
||||
for (Integer shardId : shardIds) {
|
||||
Shard shard = Shard.from(shardSet, shardId);
|
||||
|
||||
Collection<SolrHost> nodes = this.shardNodesCache.get(shard);
|
||||
List<SolrHost> availableNodes = new LinkedList<>();
|
||||
for (SolrHost node : nodes) {
|
||||
if (this.nodeDisableCache.contains(node) || this.nodeUnavailableCache.contains(node))
|
||||
continue;
|
||||
|
||||
ShardInstance shardNode = ShardInstance.from(shard, node);
|
||||
if (this.shardInstanceDisableCache.contains(shardNode) || this.shardInstanceUnavailableCache.contains(shardNode))
|
||||
continue;
|
||||
|
||||
availableNodes.add(node);
|
||||
}
|
||||
|
||||
if (availableNodes.isEmpty())
continue;

SolrHost randomNode = availableNodes.get(this.random.nextInt(availableNodes.size()));
|
||||
|
||||
shardNodes.add(ShardInstance.from(shard, randomNode).toAlfrescoModel(shard.toAlfrescoModel(shardSet.toAlfrescoModel())));
|
||||
}
|
||||
|
||||
return shardNodes;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,79 @@
|
||||
package com.inteligr8.alfresco.asie.util;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import org.alfresco.repo.index.shard.ShardMethodEnum;
|
||||
|
||||
import com.inteligr8.alfresco.asie.model.ShardSet;
|
||||
|
||||
public class ShardSetSearchComparator implements Comparator<ShardSet> {
|
||||
|
||||
@Override
|
||||
public int compare(ShardSet ss1, ShardSet ss2) {
|
||||
int compare = this.compare(ss1.getMethod(), ss2.getMethod());
|
||||
if (compare != 0)
|
||||
return compare;
|
||||
|
||||
return this.compare(ss1.getShards(), ss2.getShards());
|
||||
}
|
||||
|
||||
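// order shard methods by how precisely they can target shards: explicit ID first, then property/date, then ACL-based, then everything else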
private int compare(ShardMethodEnum method1, ShardMethodEnum method2) {
|
||||
if (method1.equals(method2))
|
||||
return 0;
|
||||
|
||||
switch (method1) {
|
||||
case EXPLICIT_ID:
|
||||
case EXPLICIT_ID_FALLBACK_LRIS:
|
||||
return -1;
|
||||
case PROPERTY:
|
||||
case DATE:
|
||||
switch (method2) {
|
||||
case EXPLICIT_ID:
|
||||
case EXPLICIT_ID_FALLBACK_LRIS:
|
||||
return 1;
|
||||
default:
|
||||
return -1;
|
||||
}
|
||||
case ACL_ID:
|
||||
case MOD_ACL_ID:
|
||||
switch (method2) {
|
||||
case EXPLICIT_ID:
|
||||
case EXPLICIT_ID_FALLBACK_LRIS:
|
||||
case PROPERTY:
|
||||
case DATE:
|
||||
return 1;
|
||||
default:
|
||||
return -1;
|
||||
}
|
||||
default:
|
||||
switch (method2) {
|
||||
case EXPLICIT_ID:
|
||||
case EXPLICIT_ID_FALLBACK_LRIS:
|
||||
case PROPERTY:
|
||||
case DATE:
|
||||
case ACL_ID:
|
||||
case MOD_ACL_ID:
|
||||
return 1;
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private int compare(Short shards1, Short shards2) {
|
||||
// the larger the shard count, the more shards that may need to be queried
|
||||
// so prefer smaller shard counts
|
||||
// no shard count (DB_ID_RANGE) should be treated as the worst (unlimited)
|
||||
if (shards1 == null && shards2 == null) {
|
||||
return 0;
|
||||
} else if (shards1 == null) {
|
||||
return 1;
|
||||
} else if (shards2 == null) {
|
||||
return -1;
|
||||
} else {
|
||||
return shards1.compareTo(shards2);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,109 @@
|
||||
|
||||
inteligr8.asie.registerUnknownShardDisabled=false
|
||||
inteligr8.asie.offlineIdleShardInSeconds=120
|
||||
inteligr8.asie.forgetOfflineShardInSeconds=86400
|
||||
|
||||
|
||||
|
||||
# we don't want items expiring out of the following caches
|
||||
# an eviction policy of NONE disables the maxItems limits
|
||||
|
||||
# Overrides of alfresco-repository.jar/alfresco/caches.properties
|
||||
cache.asieShardsetsSharedCache.tx.maxItems=65536
|
||||
cache.asieShardsetsSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieShardsetsSharedCache.maxItems=65536
|
||||
cache.asieShardsetsSharedCache.timeToLiveSeconds=0
|
||||
cache.asieShardsetsSharedCache.maxIdleSeconds=0
|
||||
cache.asieShardsetsSharedCache.cluster.type=fully-distributed
|
||||
cache.asieShardsetsSharedCache.backup-count=1
|
||||
cache.asieShardsetsSharedCache.eviction-policy=NONE
|
||||
cache.asieShardsetsSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieShardsetsSharedCache.readBackupData=false
|
||||
|
||||
cache.asieNodesSharedCache.tx.maxItems=65536
|
||||
cache.asieNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieNodesSharedCache.maxItems=65536
|
||||
cache.asieNodesSharedCache.timeToLiveSeconds=0
|
||||
cache.asieNodesSharedCache.maxIdleSeconds=0
|
||||
cache.asieNodesSharedCache.cluster.type=fully-distributed
|
||||
cache.asieNodesSharedCache.backup-count=1
|
||||
cache.asieNodesSharedCache.eviction-policy=NONE
|
||||
cache.asieNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieNodesSharedCache.readBackupData=false
|
||||
|
||||
cache.asieShardNodesSharedCache.tx.maxItems=65536
|
||||
cache.asieShardNodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieShardNodesSharedCache.maxItems=65536
|
||||
cache.asieShardNodesSharedCache.timeToLiveSeconds=0
|
||||
cache.asieShardNodesSharedCache.maxIdleSeconds=0
|
||||
cache.asieShardNodesSharedCache.cluster.type=fully-distributed
|
||||
cache.asieShardNodesSharedCache.backup-count=1
|
||||
cache.asieShardNodesSharedCache.eviction-policy=NONE
|
||||
cache.asieShardNodesSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieShardNodesSharedCache.readBackupData=false
|
||||
|
||||
cache.asieShardInstanceStateSharedCache.tx.maxItems=65536
|
||||
cache.asieShardInstanceStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieShardInstanceStateSharedCache.maxItems=65536
|
||||
cache.asieShardInstanceStateSharedCache.timeToLiveSeconds=0
|
||||
cache.asieShardInstanceStateSharedCache.maxIdleSeconds=0
|
||||
cache.asieShardInstanceStateSharedCache.cluster.type=fully-distributed
|
||||
cache.asieShardInstanceStateSharedCache.backup-count=1
|
||||
cache.asieShardInstanceStateSharedCache.eviction-policy=NONE
|
||||
cache.asieShardInstanceStateSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieShardInstanceStateSharedCache.readBackupData=false
|
||||
|
||||
cache.asieNodeDisabledSharedCache.tx.maxItems=65536
|
||||
cache.asieNodeDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieNodeDisabledSharedCache.maxItems=65536
|
||||
cache.asieNodeDisabledSharedCache.timeToLiveSeconds=0
|
||||
cache.asieNodeDisabledSharedCache.maxIdleSeconds=0
|
||||
cache.asieNodeDisabledSharedCache.cluster.type=fully-distributed
|
||||
cache.asieNodeDisabledSharedCache.backup-count=1
|
||||
cache.asieNodeDisabledSharedCache.eviction-policy=NONE
|
||||
cache.asieNodeDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieNodeDisabledSharedCache.readBackupData=false
|
||||
|
||||
cache.asieNodeUnavailableSharedCache.tx.maxItems=65536
|
||||
cache.asieNodeUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieNodeUnavailableSharedCache.maxItems=65536
|
||||
cache.asieNodeUnavailableSharedCache.timeToLiveSeconds=0
|
||||
cache.asieNodeUnavailableSharedCache.maxIdleSeconds=0
|
||||
cache.asieNodeUnavailableSharedCache.cluster.type=fully-distributed
|
||||
cache.asieNodeUnavailableSharedCache.backup-count=1
|
||||
cache.asieNodeUnavailableSharedCache.eviction-policy=NONE
|
||||
cache.asieNodeUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieNodeUnavailableSharedCache.readBackupData=false
|
||||
|
||||
cache.asieShardInstanceDisabledSharedCache.tx.maxItems=65536
|
||||
cache.asieShardInstanceDisabledSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieShardInstanceDisabledSharedCache.maxItems=65536
|
||||
cache.asieShardInstanceDisabledSharedCache.timeToLiveSeconds=0
|
||||
cache.asieShardInstanceDisabledSharedCache.maxIdleSeconds=0
|
||||
cache.asieShardInstanceDisabledSharedCache.cluster.type=fully-distributed
|
||||
cache.asieShardInstanceDisabledSharedCache.backup-count=1
|
||||
cache.asieShardInstanceDisabledSharedCache.eviction-policy=NONE
|
||||
cache.asieShardInstanceDisabledSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieShardInstanceDisabledSharedCache.readBackupData=false
|
||||
|
||||
cache.asieShardInstanceUnavailableSharedCache.tx.maxItems=65536
|
||||
cache.asieShardInstanceUnavailableSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieShardInstanceUnavailableSharedCache.maxItems=65536
|
||||
cache.asieShardInstanceUnavailableSharedCache.timeToLiveSeconds=0
|
||||
cache.asieShardInstanceUnavailableSharedCache.maxIdleSeconds=0
|
||||
cache.asieShardInstanceUnavailableSharedCache.cluster.type=fully-distributed
|
||||
cache.asieShardInstanceUnavailableSharedCache.backup-count=1
|
||||
cache.asieShardInstanceUnavailableSharedCache.eviction-policy=NONE
|
||||
cache.asieShardInstanceUnavailableSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieShardInstanceUnavailableSharedCache.readBackupData=false
|
||||
|
||||
cache.asieCoreExplicitSharedCache.tx.maxItems=65536
|
||||
cache.asieCoreExplicitSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
|
||||
cache.asieCoreExplicitSharedCache.maxItems=65536
|
||||
cache.asieCoreExplicitSharedCache.timeToLiveSeconds=0
|
||||
cache.asieCoreExplicitSharedCache.maxIdleSeconds=0
|
||||
cache.asieCoreExplicitSharedCache.cluster.type=fully-distributed
|
||||
cache.asieCoreExplicitSharedCache.backup-count=1
|
||||
cache.asieCoreExplicitSharedCache.eviction-policy=NONE
|
||||
cache.asieCoreExplicitSharedCache.merge-policy=com.hazelcast.map.merge.PutIfAbsentMapMergePolicy
|
||||
cache.asieCoreExplicitSharedCache.readBackupData=false
|
@@ -0,0 +1,50 @@
<?xml version='1.0' encoding='UTF-8'?>

<!-- Use this file for beans to be loaded in whatever order Alfresco/Spring decides -->
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:context="http://www.springframework.org/schema/context"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
        http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">

    <bean name="asieShardsetsCache" factory-bean="cacheFactory" factory-method="createCache">
        <constructor-arg value="cache.asieShardsetsSharedCache" />
    </bean>

    <bean name="asieNodesCache" factory-bean="cacheFactory" factory-method="createCache">
        <constructor-arg value="cache.asieNodesSharedCache" />
    </bean>

    <bean name="asieShardNodesCache" factory-bean="cacheFactory" factory-method="createMultiValueCache">
        <constructor-arg value="cache.asieShardNodesSharedCache" />
    </bean>

    <bean name="asieShardInstanceStateCache" factory-bean="cacheFactory" factory-method="createCache">
        <constructor-arg value="cache.asieShardInstanceStateSharedCache" />
    </bean>

    <bean name="asieNodeDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
        <constructor-arg value="cache.asieNodeDisabledSharedCache" />
        <constructor-arg value="java.util.HashSet" />
    </bean>

    <bean name="asieNodeUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
        <constructor-arg value="cache.asieNodeUnavailableSharedCache" />
        <constructor-arg value="java.util.HashSet" />
    </bean>

    <bean name="asieShardInstanceDisabledCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
        <constructor-arg value="cache.asieShardInstanceDisabledSharedCache" />
        <constructor-arg value="java.util.HashSet" />
    </bean>

    <bean name="asieShardInstanceUnavailableCache" factory-bean="cacheFactory" factory-method="createCollectionCache">
        <constructor-arg value="cache.asieShardInstanceUnavailableSharedCache" />
        <constructor-arg value="java.util.HashSet" />
    </bean>

    <bean name="asieCoreExplicitCache" factory-bean="cacheFactory" factory-method="createCache">
        <constructor-arg value="cache.asieCoreExplicitSharedCache" />
    </bean>

</beans>
@@ -0,0 +1,11 @@
module.id=com_inteligr8_alfresco_${project.artifactId}
module.aliases=
module.title=${project.name}
module.description=${project.description}
module.version=${module.version}

module.repo.version.min=23.0

# this is creating all sorts of problems; probably because of the non-standard versioning
module.depends.com.inteligr8.alfresco.cachext-platform-module=*
module.depends.com.inteligr8.alfresco.cxf-jaxrs-platform-module=*
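The wildcard values above sidestep the version matching that the comment calls out. If the dependent modules ever adopt plain numeric versions, Alfresco's module.depends syntax also accepts a bounded range; a hypothetical sketch only, with ranges that are illustrative and not values used by this module:

# module.depends.com.inteligr8.alfresco.cachext-platform-module=1.0-1.99
# module.depends.com.inteligr8.alfresco.cxf-jaxrs-platform-module=1.3-1.99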
@@ -0,0 +1,146 @@
package com.inteligr8.alfresco.asie;

import java.util.ArrayList;
import java.util.List;

import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class QueryConstraintUnitTest {

    private static final ObjectMapper om = new ObjectMapper();

    @BeforeClass
    public static void init() {
        SimpleModule module = new SimpleModule();
        module.addSerializer(Tree.class, new TreeSerializer());
        om.registerModule(module);
    }

    @Test
    public void testSingleExactTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyString() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:'testing'", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "'testing'");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    @Test
    public void testSingleFuzzyStringDoubleQuotes() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("cm:title:\"testing\"", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "\"testing\"");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    @Test
    public void testSingleRange() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:created:[NOW TO '2025-01-01T00:00:00'>", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "RANGE", "INCLUSIVE", "NOW", "'2025-01-01T00:00:00'", "EXCLUSIVE");
        tree = this.validateChildren(tree, "FIELD_REF", "created");
        this.validate(tree, "PREFIX", "cm");
    }

    @Test
    public void testTwoTerms() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test1 AND @cm:author:test2", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        List<Tree> trees = this.validateChildren(tree, "CONJUNCTION", 2);

        tree = trees.get(0);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test1");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");

        tree = trees.get(1);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test2");
        tree = this.validateChildren(tree, "FIELD_REF", "author");
        this.validate(tree, "PREFIX", "cm");
    }

    protected void validate(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(extraValues.length, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
    }

    protected Tree validateChildren(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(extraValues.length + 1, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
        return tree.getChild(extraValues.length);
    }

    protected List<Tree> validateChildren(Tree tree, String text, int count) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(count, tree.getChildCount());
        List<Tree> children = new ArrayList<>();
        for (int c = 0; c < tree.getChildCount(); c++)
            children.add(tree.getChild(c));
        return children;
    }

    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException, JsonProcessingException {
        CharStream cs = new ANTLRStringStream(ftsQuery);
        FTSLexer lexer = new FTSLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FTSParser parser = new FTSParser(tokens);
        parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
        parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
        return tree;
    }

}
@@ -0,0 +1,44 @@
package com.inteligr8.alfresco.asie;

import java.io.IOException;

import org.antlr.runtime.tree.Tree;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;

public class TreeSerializer extends StdSerializer<Tree> {

    private static final long serialVersionUID = -2714782538361726878L;

    public TreeSerializer() {
        super(Tree.class);
    }

    public TreeSerializer(Class<Tree> type) {
        super(type);
    }

    public TreeSerializer(JavaType type) {
        super(type);
    }

    @Override
    public void serialize(Tree value, JsonGenerator gen, SerializerProvider provider) throws IOException {
        gen.writeStartObject();
        if (value.getText() != null)
            gen.writeStringField("text", value.getText());

        if (value.getChildCount() > 0) {
            gen.writeArrayFieldStart("children");
            for (int c = 0; c < value.getChildCount(); c++)
                gen.writeObject(value.getChild(c));
            gen.writeEndArray();
        }

        gen.writeEndObject();
    }

}
BIN enterprise-module/metadata.keystore (new file)
Binary file not shown.
@@ -6,7 +6,7 @@
<parent>
    <groupId>com.inteligr8.alfresco</groupId>
    <artifactId>asie-platform-module-parent</artifactId>
    <version>1.2-SNAPSHOT</version>
    <version>1.3-SNAPSHOT</version>
    <relativePath>../</relativePath>
</parent>

@@ -16,8 +16,12 @@
<name>ASIE Platform Module for ACS Enterprise</name>

<properties>
    <alfresco.sdk.version>5.2.0</alfresco.sdk.version>
    <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
    <alfresco.platform.version>23.3.0</alfresco.platform.version>
    <alfresco.platform.war.version>23.3.0.98</alfresco.platform.war.version>
    <tomcat-rad.version>10-2.1</tomcat-rad.version>

    <beedk.rad.acs-search.enabled>true</beedk.rad.acs-search.enabled>
</properties>

<dependencyManagement>
@@ -79,8 +83,8 @@
<dependency>
    <groupId>com.inteligr8.alfresco</groupId>
    <artifactId>cxf-jaxrs-platform-module</artifactId>
    <version>1.3.1-acs-v23.3</version>
    <scope>provided</scope>
    <version>1.3.2-acs-v23.3</version>
    <type>amp</type>
</dependency>

<!-- Including for testing purposes only -->
@@ -1,22 +1,22 @@
#!/bin/sh

discoverArtifactId() {
  ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate`
  ARTIFACT_ID=`mvn -q -Dexpression=project.artifactId -DforceStdout help:evaluate | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'`
}

rebuild() {
  echo "Rebuilding project ..."
  mvn process-classes
  mvn process-test-classes
}

start() {
  echo "Rebuilding project and starting Docker containers to support rapid application development ..."
  mvn -Drad process-classes
  mvn -Drad process-test-classes
}

start_log() {
  echo "Rebuilding project and starting Docker containers to support rapid application development ..."
  mvn -Drad -Ddocker.showLogs process-classes
  mvn -Drad -Ddocker.showLogs process-test-classes
}

stop() {
@@ -91,7 +91,7 @@ public abstract class AbstractUnregisterNodeWebScript<T extends NodeParameterSet
if (status == null) {
    this.logger.warn("Registered host/core status could not be retrieved: {}:{}/solr/{}", nodeHostname, nodePort, core);
} else {
    CoreMetadata coreMetadata = status.getStatus().getCores().get(core);
    CoreMetadata coreMetadata = status.getCores().getByCore(core);
    if (coreMetadata == null || coreMetadata.getName() == null) {
        this.logger.warn("Registered core does not actually exist on the node host; could be a DNS issue: {}:{}/solr/{}", nodeHostname, nodePort, core);
    } else {
@@ -141,13 +141,13 @@ public abstract class AbstractUnregisterNodeWebScript<T extends NodeParameterSet

protected StatusResponse getCoreStatus(String nodeHostname, int nodePort, String core) {
    this.logger.debug("Retrieving status for core {} on ASIE node: {}", core, nodeHostname);
    CoreAdminApi api = this.createApi(nodeHostname, nodePort);
    CoreAdminApi api = this.getApiService().createApi(nodeHostname, nodePort, CoreAdminApi.class);
    return api.getStatus(new StatusRequest().withCore(core));
}

protected void unloadCore(String nodeHostname, int nodePort, String core) {
    this.logger.info("Unloading core {} on ASIE node: {}", core, nodeHostname);
    CoreAdminApi api = this.createApi(nodeHostname, nodePort);
    CoreAdminApi api = this.getApiService().createApi(nodeHostname, nodePort, CoreAdminApi.class);
    api.unload(new UnloadRequest().withCore(core));
}

@@ -63,7 +63,7 @@ public class ReloadNodeShardWebScript extends AbstractAsieNodeWebScript {
    throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The specified node/shard could not be found or formulated");

this.logger.info("Reloading core {} on ASIE node: {}", coreName, nodeHostname);
CoreAdminApi api = this.createApi(nodeHostname, nodePort);
CoreAdminApi api = this.getApiService().createApi(nodeHostname, nodePort, CoreAdminApi.class);
try {
    api.create(new CreateRequest()
        .withCore(coreName)
@@ -64,7 +64,7 @@ public class ReloadNodeWebScript extends AbstractAsieNodeWebScript {
String coreInstancePath = core.getValue();

this.logger.info("Reloading core {} on ASIE node: {}", coreName, nodeHostname);
CoreAdminApi api = this.createApi(nodeHostname, nodePort);
CoreAdminApi api = this.getApiService().createApi(nodeHostname, nodePort, CoreAdminApi.class);
try {
    api.create(new CreateRequest()
        .withCore(coreName)
@@ -1,21 +1,33 @@
package com.inteligr8.alfresco.asie.enterprise.rest;

import org.alfresco.repo.index.shard.ShardState;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.rest.model.NodeShardParameterSet;
import com.inteligr8.alfresco.asie.spi.ShardDiscoveryService;

@Component(value = "webscript.com.inteligr8.alfresco.asie.nodeShard.delete")
public class UnloadNodeShardWebScript extends AbstractUnregisterNodeWebScript<NodeShardParameterSet> {

    @Autowired
    private ShardDiscoveryService sds;

    @Override
    protected NodeShardParameterSet createParameters(WebScriptRequest req, String nodeHostname, int nodePort) {
        ShardSet shardSet = this.getRequiredPathParameter(req, "shardSet", ShardSet.class);
        String coreName = this.getRequiredPathParameter(req, "shardCore");
        int shardId = this.getRequiredPathParameter(req, "shardId", Integer.class);

        return new NodeShardParameterSet(nodeHostname, nodePort, shardSet, shardId);
        try {
            ShardSet shardSet = this.sds.findSetByCore(coreName);
            return new NodeShardParameterSet(nodeHostname, nodePort, shardSet, shardId);
        } catch (IllegalArgumentException iae) {
            throw new WebScriptException(HttpStatus.BAD_REQUEST.value(), iae.getMessage());
        }
    }

    @Override
pom.xml
@@ -5,7 +5,7 @@

<groupId>com.inteligr8.alfresco</groupId>
<artifactId>asie-platform-module-parent</artifactId>
<version>1.2-SNAPSHOT</version>
<version>1.3-SNAPSHOT</version>
<packaging>pom</packaging>

<name>ASIE Platform Module Parent</name>
@@ -39,9 +39,9 @@

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>11</maven.compiler.source>
    <maven.compiler.target>11</maven.compiler.target>
    <maven.compiler.release>11</maven.compiler.release>
    <maven.compiler.source>17</maven.compiler.source>
    <maven.compiler.target>17</maven.compiler.target>
    <maven.compiler.release>17</maven.compiler.release>
    <maven.deploy.skip>true</maven.deploy.skip>
</properties>

@@ -56,12 +56,34 @@
<!-- avoids struts dependency -->
<plugin>
    <artifactId>maven-site-plugin</artifactId>
    <version>3.12.1</version>
    <version>3.21.0</version>
</plugin>
<!-- Force use of a new maven-dependency-plugin that doesn't download struts dependency -->
<plugin>
    <artifactId>maven-dependency-plugin</artifactId>
    <version>3.8.0</version>
    <version>3.8.1</version>
</plugin>
<plugin>
    <artifactId>maven-surefire-plugin</artifactId>
    <version>3.4.0</version>
    <dependencies>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>5.11.0</version>
        </dependency>
    </dependencies>
</plugin>
<plugin>
    <artifactId>maven-failsafe-plugin</artifactId>
    <version>3.4.0</version>
    <dependencies>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>5.11.0</version>
        </dependency>
    </dependencies>
</plugin>
</plugins>
</pluginManagement>
@@ -6,7 +6,7 @@
<parent>
    <groupId>com.inteligr8.alfresco</groupId>
    <artifactId>asie-platform-module-parent</artifactId>
    <version>1.2-SNAPSHOT</version>
    <version>1.3-SNAPSHOT</version>
    <relativePath>../</relativePath>
</parent>

@@ -16,7 +16,7 @@
<name>ASIE Shared Library for Platform Modules</name>

<properties>
    <alfresco.sdk.version>5.2.0</alfresco.sdk.version>
    <alfresco.sdk.version>4.9.0</alfresco.sdk.version>
    <alfresco.platform.version>23.3.0</alfresco.platform.version>
</properties>

@@ -36,15 +36,20 @@
<dependency>
    <groupId>com.inteligr8.alfresco</groupId>
    <artifactId>asie-api</artifactId>
    <version>1.0-SNAPSHOT-asie2</version>
    <version>1.1-SNAPSHOT-asie2</version>
</dependency>
<dependency>
    <groupId>com.inteligr8</groupId>
    <artifactId>common-rest-client</artifactId>
    <version>3.0.1-cxf</version>
    <version>3.0.3-cxf</version>
</dependency>

<!-- Needed by this module, but provided by ACS -->
<dependency>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-data-model</artifactId>
    <scope>provided</scope>
</dependency>
<dependency>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-repository</artifactId>
@@ -0,0 +1,36 @@
package com.inteligr8.alfresco.asie.model;

import java.io.Serializable;

public class PersistedNode implements Serializable {

    private static final long serialVersionUID = 4105196543023419818L;

    private final SolrHost node;
    private final long persistMillis;
    private long expireTimeMillis;

    public PersistedNode(SolrHost node, int persistMinutes) {
        this.node = node;
        this.persistMillis = persistMinutes * 60L * 1000L;
        this.reset();
    }

    public void reset() {
        this.expireTimeMillis = System.currentTimeMillis() + this.persistMillis;
    }

    public boolean isExpired() {
        return this.expireTimeMillis < System.currentTimeMillis();
    }

    public SolrHost getNode() {
        return this.node;
    }

    @Override
    public String toString() {
        // report the time remaining until expiration, not its negation
        return "node: " + this.node + "; expires in: " + (this.expireTimeMillis - System.currentTimeMillis()) + " ms";
    }

}
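A quick usage sketch of the class above (names are illustrative, assuming an existing SolrHost instance):

// register or refresh a node entry that persists for 5 minutes of silence
PersistedNode entry = new PersistedNode(solrHost, 5);
if (!entry.isExpired())
    entry.reset();               // heartbeat: push the expiration out another 5 minutes
else
    forget(entry.getNode());     // hypothetical cleanup of a stale node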
@@ -19,7 +19,7 @@ public class Shard implements Serializable {
private final String spec;

protected Shard(ShardSet shardSet, int shardId) {
    this.spec = shardSet.getCore() + "~" + shardId;
    this.spec = shardSet.getCore() + "-" + shardId;
}

protected Shard(String spec) {
@@ -34,16 +34,20 @@ public class Shard implements Serializable {
}

public String getSpec() {
    return spec;
    return this.spec;
}

public String getCoreName() {
    return this.spec;
}

public String extractShardSetCore() {
    int pos = this.spec.indexOf('~');
    int pos = this.spec.lastIndexOf('-');
    return this.spec.substring(0, pos);
}

public int extractShardId() {
    int pos = this.spec.indexOf('~');
    int pos = this.spec.lastIndexOf('-');
    return Integer.parseInt(this.spec.substring(pos+1));
}

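To make the separator change above concrete: with '-' joining the shard set core and the shard id, parsing must use lastIndexOf so that core names which themselves contain a hyphen still split correctly. A small illustration with hypothetical values:

// spec built by Shard(ShardSet, int): core "alfresco-docs" + shard 2
String spec = "alfresco-docs-2";
int pos = spec.lastIndexOf('-');
String core = spec.substring(0, pos);                      // "alfresco-docs"
int shardId = Integer.parseInt(spec.substring(pos + 1));   // 2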
@@ -13,7 +13,7 @@ public class ShardInstance implements Serializable {
private final String spec;

protected ShardInstance(Shard shard, SolrHost node) {
    this.spec = shard.getSpec() + "~" + node.getSpec();
    this.spec = node.getSpec() + "~" + shard.getSpec();
}

public org.alfresco.repo.index.shard.ShardInstance toAlfrescoModel(org.alfresco.repo.index.shard.Shard shard) {
@@ -33,14 +33,14 @@ public class ShardInstance implements Serializable {
    return spec;
}

public Shard extractShard() {
    int pos = this.spec.indexOf('~');
    return Shard.from(this.spec.substring(0, pos));
}

public SolrHost extractNode() {
    int pos = this.spec.indexOf('~');
    return SolrHost.from(this.spec.substring(pos+1));
    return SolrHost.from(this.spec.substring(0, pos));
}

public Shard extractShard() {
    int pos = this.spec.indexOf('~');
    return Shard.from(this.spec.substring(pos+1));
}

@Override
@@ -41,6 +41,8 @@ public class SolrHost implements Serializable {
this.spec = spec;

Matcher matcher = PATTERN.matcher(spec);
if (!matcher.find())
    throw new IllegalArgumentException();
this.hostname = matcher.group(1);
this.port = Integer.parseInt(matcher.group(2));
this.path = matcher.group(3);
@@ -0,0 +1,119 @@
package com.inteligr8.alfresco.asie.rest;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;

import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

public abstract class AbstractAcsNodeActionWebScript extends AbstractAsieWebScript {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private NodeService nodeService;

    @Override
    public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
        String nodeId = request.getServiceMatch().getTemplateVars().get("nodeId");
        NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, nodeId);
        long nodeDbId = this.findNodeDbId(nodeRef);
        this.logger.trace("Found node database ID: {}: {}", nodeId, nodeDbId);

        try {
            Map<String, Object> responseMap = new HashMap<>();
            responseMap.put("nodeDbId", nodeDbId);

            ActionCallback callback = new ActionCallback() {

                @Override
                public void success(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("success");
                    if (instances == null)
                        responseMap.put("success", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }

                @Override
                public void scheduled(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("scheduled");
                    if (instances == null)
                        responseMap.put("scheduled", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }

                @Override
                public void error(ShardInstance instance, String message) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> instances = (Map<String, Object>) responseMap.get("error");
                    if (instances == null)
                        responseMap.put("error", instances = new HashMap<>());
                    instances.put(instance.getSpec(), Collections.singletonMap("message", message));
                }

                @Override
                public void unknownResult(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("unknown");
                    if (instances == null)
                        responseMap.put("unknown", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }
            };

            this.executeAction(nodeDbId, callback, 10L, TimeUnit.SECONDS, 30L, TimeUnit.SECONDS);

            if (responseMap.containsKey("error")) {
                response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
            } else if (responseMap.containsKey("scheduled")) {
                response.setStatus(HttpStatus.ACCEPTED.value());
            } else {
                response.setStatus(HttpStatus.OK.value());
            }

            response.setContentType("application/json");
            this.getObjectMapper().writeValue(response.getWriter(), responseMap);
        } catch (UnsupportedOperationException uoe) {
            throw new WebScriptException(HttpStatus.NOT_IMPLEMENTED.value(), uoe.getMessage(), uoe);
        } catch (InterruptedException ie) {
            throw new WebScriptException(HttpStatus.SERVICE_UNAVAILABLE.value(), "The execution was interrupted", ie);
        } catch (TimeoutException te) {
            throw new WebScriptException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "The execution may continue, but timed-out waiting", te);
        }
    }

    protected abstract void executeAction(
            long nodeDbId, ActionCallback callback,
            long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException;

    private long findNodeDbId(NodeRef nodeRef) {
        try {
            return (Long) this.nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID);
        } catch (InvalidNodeRefException inre) {
            throw new WebScriptException(HttpStatus.NOT_FOUND.value(), "The node does not exist");
        }
    }

}
@@ -0,0 +1,102 @@
package com.inteligr8.alfresco.asie.rest;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;

import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

public abstract class AbstractActionWebScript extends AbstractAsieWebScript {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Override
    public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
        try {
            Map<String, Object> responseMap = new HashMap<>();

            ActionCallback callback = new ActionCallback() {

                @Override
                public void success(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("success");
                    if (instances == null)
                        responseMap.put("success", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }

                @Override
                public void scheduled(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("scheduled");
                    if (instances == null)
                        responseMap.put("scheduled", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }

                @Override
                public void error(ShardInstance instance, String message) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> instances = (Map<String, Object>) responseMap.get("error");
                    if (instances == null)
                        responseMap.put("error", instances = new HashMap<>());
                    instances.put(instance.getSpec(), Collections.singletonMap("message", message));
                }

                @Override
                public void unknownResult(ShardInstance instance) {
                    @SuppressWarnings("unchecked")
                    List<String> instances = (List<String>) responseMap.get("unknown");
                    if (instances == null)
                        responseMap.put("unknown", instances = new LinkedList<>());
                    instances.add(instance.getSpec());
                }
            };

            this.executeAction(callback, 10L, TimeUnit.SECONDS, 30L, TimeUnit.SECONDS);

            if (responseMap.containsKey("error")) {
                response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
            } else if (responseMap.containsKey("scheduled")) {
                response.setStatus(HttpStatus.ACCEPTED.value());
            } else {
                response.setStatus(HttpStatus.OK.value());
            }

            response.setContentType("application/json");
            this.getObjectMapper().writeValue(response.getWriter(), responseMap);
        } catch (UnsupportedOperationException uoe) {
            throw new WebScriptException(HttpStatus.NOT_IMPLEMENTED.value(), uoe.getMessage(), uoe);
        } catch (InterruptedException ie) {
            throw new WebScriptException(HttpStatus.SERVICE_UNAVAILABLE.value(), "The execution was interrupted", ie);
        } catch (TimeoutException te) {
            throw new WebScriptException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "The execution may continue, but timed-out waiting", te);
        }
    }

    protected abstract void executeAction(
            ActionCallback callback,
            long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException;

}
|
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;

import com.inteligr8.alfresco.asie.model.ShardSet;

public abstract class AbstractAsieNodeShardWebScript extends AbstractAsieShardableWebScript {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
@@ -20,16 +18,16 @@ public abstract class AbstractAsieNodeShardWebScript extends AbstractAsieShardab
    String nodeEndpoint = this.getRequiredPathParameter(req, "nodeEndpoint");
    int colon = nodeEndpoint.lastIndexOf(':');
    String nodeHostname = colon < 0 ? nodeEndpoint : nodeEndpoint.substring(0, colon);
    int nodePort = colon < 0 ? this.getDefaultSolrPort() : Integer.parseInt(nodeEndpoint.substring(colon+1));
    int nodePort = colon < 0 ? this.getApiService().getDefaultSolrPort() : Integer.parseInt(nodeEndpoint.substring(colon+1));

    ShardSet shardSet = this.getRequiredPathParameter(req, "shardSet", ShardSet.class);
    String shardCore = this.getRequiredPathParameter(req, "shardCore", String.class);
    int shardId = this.getRequiredPathParameter(req, "shardId", Integer.class);

    this.execute(req, res, nodeHostname, nodePort, shardSet, shardId);
    this.execute(req, res, nodeHostname, nodePort, shardCore, shardId);
}

protected abstract void execute(WebScriptRequest req, WebScriptResponse res,
        String nodeHostname, int nodePort, ShardSet shardSet, int shardId)
        String nodeHostname, int nodePort, String shardCore, int shardId)
        throws IOException;

}
@@ -31,7 +31,7 @@ public abstract class AbstractAsieNodeWebScript extends AbstractAsieShardableWeb
int colon = nodeEndpoint.lastIndexOf(':');
String nodeHostname = colon < 0 ? nodeEndpoint : nodeEndpoint.substring(0, colon);
nodeHostname = nodeHostname.replace('_', '.');
int nodePort = colon < 0 ? this.getDefaultSolrPort() : Integer.parseInt(nodeEndpoint.substring(colon+1));
int nodePort = colon < 0 ? this.getApiService().getDefaultSolrPort() : Integer.parseInt(nodeEndpoint.substring(colon+1));

this.execute(req, res, nodeHostname, nodePort);
}
@@ -122,7 +122,7 @@ public abstract class AbstractAsieShardableWebScript extends AbstractAsieWebScri
}

protected CoreAdminApi getApi(ShardInstance shard) {
    return this.createApi(shard.getHostName(), shard.getPort());
    return this.getApiService().createApi(shard.getHostName(), shard.getPort(), CoreAdminApi.class);
}

}
@@ -1,140 +1,34 @@
package com.inteligr8.alfresco.asie.rest;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.extensions.webscripts.WebScriptRequest;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.rs.AuthorizationFilter;
import com.inteligr8.rs.Client;
import com.inteligr8.rs.ClientCxfConfiguration;
import com.inteligr8.rs.ClientCxfImpl;
import com.inteligr8.alfresco.asie.service.ApiService;

import jakarta.ws.rs.client.ClientRequestContext;

public abstract class AbstractAsieWebScript extends AbstractWebScript implements InitializingBean {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Value("${solr.secureComms}")
    private String solrSecureComms;

    @Value("${solr.port}")
    private int solrPort;

    @Value("${solr.port.ssl}")
    private int solrSslPort;

    @Value("${solr.sharedSecret.header}")
    private String solrSharedSecretHeader;

    @Value("${solr.sharedSecret}")
    private String solrSharedSecret;

    @Value("${inteligr8.asie.allowedAuthorities}")
    private String authorizedAuthoritiesStr;

    @Value("${inteligr8.asie.basePath}")
    private String solrBaseUrl;
public abstract class AbstractAsieWebScript extends AbstractWebScript {

    @Autowired
    @Qualifier(Constants.QUALIFIER_ASIE)
    private ObjectMapper objectMapper;

    private Set<String> authorizedAuthorities;

    @Override
    public void afterPropertiesSet() throws Exception {
        this.authorizedAuthorities = new HashSet<>();
        String[] authorities = this.authorizedAuthoritiesStr.split(",");
        for (String authority : authorities) {
            authority = StringUtils.trimToNull(authority);
            if (authority != null)
                this.authorizedAuthorities.add(authority);
        }

        if (this.authorizedAuthorities.isEmpty())
            this.logger.warn("All authenticated users will be authorized to access ASIE web scripts");

        this.solrSharedSecret = StringUtils.trimToNull(this.solrSharedSecret);
    }

    @Override
    protected Set<String> getAuthorities() {
        return this.authorizedAuthorities;
    }
    @Autowired
    private ApiService api;

    protected ObjectMapper getObjectMapper() {
        return this.objectMapper;
    }

    protected CoreAdminApi createApi(String hostname, int port) {
        String solrBaseUrl = this.formulateSolrBaseUrl(hostname, port);
        this.logger.trace("Using Solr base URL: {}", solrBaseUrl);
        Client solrClient = this.createClient(solrBaseUrl);
        return this.getApi(solrClient);
    }

    protected CoreAdminApi getApi(Client solrClient) {
        return solrClient.getApi(CoreAdminApi.class);
    }

    protected int getDefaultSolrPort() {
        boolean isSsl = "https".equals(this.solrSecureComms);
        return isSsl ? this.solrSslPort : this.solrPort;
    protected ApiService getApiService() {
        return this.api;
    }

    protected String formulateSolrBaseUrl(WebScriptRequest req) {
        String hostname = this.getRequiredPathParameter(req, "hostname");
        Integer port = this.getOptionalPathParameter(req, "port", Integer.class);
        return this.formulateSolrBaseUrl(hostname, port);
    }

    protected String formulateSolrBaseUrl(String hostname, Integer port) {
        boolean isSsl = "https".equals(this.solrSecureComms);
        StringBuilder baseUrl = new StringBuilder(isSsl ? "https" : "http").append("://").append(hostname);
        baseUrl.append(':').append(port == null ? (isSsl ? this.solrSslPort : this.solrPort) : port);
        baseUrl.append(this.solrBaseUrl);
        return baseUrl.toString();
    }

    protected Client createClient(final String baseUrl) {
        ClientCxfImpl client = new ClientCxfImpl(new ClientCxfConfiguration() {
            @Override
            public String getBaseUrl() {
                return baseUrl.toString();
            }

            @Override
            public AuthorizationFilter createAuthorizationFilter() {
                return solrSharedSecret == null ? null : new AuthorizationFilter() {
                    @Override
                    public void filter(ClientRequestContext requestContext) throws IOException {
                        logger.debug("Adding authorization headers for ASIE shared auth: {}", solrSharedSecretHeader);
                        requestContext.getHeaders().putSingle(solrSharedSecretHeader, solrSharedSecret);
                    }
                };
            }

            @Override
            public boolean isDefaultBusEnabled() {
                return false;
            }
        });

        client.register();
        return client;
        return this.api.formulateSolrBaseUrl(hostname, port);
    }

}
@@ -4,11 +4,19 @@ import java.io.IOException;
|
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.security.AuthorityService;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.extensions.webscripts.Description.RequiredAuthentication;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
@@ -16,9 +24,38 @@ import org.springframework.http.HttpStatus;

import net.sf.acegisecurity.GrantedAuthority;

public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript {
public abstract class AbstractWebScript extends org.springframework.extensions.webscripts.AbstractWebScript implements InitializingBean {

    protected abstract Set<String> getAuthorities();
    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Value("${inteligr8.asie.allowedAuthorities}")
    private String authorizedAuthoritiesStr;

    @Autowired
    private AuthorityService authorityService;

    private Set<String> authorizedAuthorities;

    @Override
    public void afterPropertiesSet() throws Exception {
        this.authorizedAuthorities = new HashSet<>();
        String[] authorities = this.authorizedAuthoritiesStr.split(",");
        for (String authority : authorities) {
            authority = StringUtils.trimToNull(authority);
            if (authority != null)
                this.authorizedAuthorities.add(authority);
        }

        if (this.authorizedAuthorities.isEmpty()) {
            this.logger.warn("All authenticated users will be authorized to access web scripts");
        } else {
            this.logger.debug("Allowing only authorities: {}", this.authorizedAuthorities);
        }
    }

    protected Set<String> getAuthorities() {
        return this.authorizedAuthorities;
    }

    @Override
    public final void execute(WebScriptRequest request, WebScriptResponse response) throws IOException {
@@ -38,6 +75,13 @@ public abstract class AbstractWebScript extends org.springframework.extensions.w
        return true;
    }

    Set<String> authorities = this.authorityService.getAuthoritiesForUser(AuthenticationUtil.getFullyAuthenticatedUser());
    if (authorities != null) {
        if (!Collections.disjoint(this.getAuthorities(), authorities))
            return true;
    }

    this.logger.trace("Not authorized: user '{}'; authorities: {} + {}", AuthenticationUtil.getFullyAuthenticatedUser(), AuthenticationUtil.getFullAuthentication().getAuthorities(), authorities);
    return false;
}

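Since the authority check above is driven entirely by a comma-separated property, authorization is configured rather than coded. A hypothetical alfresco-global.properties entry (the group names are examples, not defaults shipped by this module):

inteligr8.asie.allowedAuthorities=GROUP_ALFRESCO_ADMINISTRATORS,GROUP_ASIE_ADMINS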
@@ -3,7 +3,6 @@ package com.inteligr8.alfresco.asie.rest;
import java.io.IOException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
@@ -20,13 +19,13 @@ public class ClearRegistryWebScript extends AbstractWebScript {

@Autowired
private ShardStateService sss;

@Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException {

@Override
public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
    this.sss.clear();
    this.sbs.forget();

    res.setStatus(HttpStatus.OK.value());
    response.setStatus(HttpStatus.OK.value());
}

}
@@ -0,0 +1,24 @@
package com.inteligr8.alfresco.asie.rest;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.service.FixService;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component(value = "webscript.com.inteligr8.alfresco.asie.fix.post")
public class FixWebScript extends AbstractActionWebScript {

    @Autowired
    private FixService fixService;

    @Override
    protected void executeAction(ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        // forward the caller-supplied timeouts instead of hard-coding them
        this.fixService.fix(callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,24 @@
package com.inteligr8.alfresco.asie.rest;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.service.PurgeService;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component(value = "webscript.com.inteligr8.alfresco.asie.purgeAcsNode.put")
public class PurgeAcsNodeWebScript extends AbstractAcsNodeActionWebScript {

    @Autowired
    private PurgeService purgeService;

    @Override
    protected void executeAction(long nodeDbId, ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        // forward the caller-supplied timeouts instead of hard-coding them
        this.purgeService.purge(nodeDbId, callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,128 @@
package com.inteligr8.alfresco.asie.rest;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.service.AcsReconcileService;
import com.inteligr8.alfresco.asie.spi.ReconcileCallback;

@Component(value = "webscript.com.inteligr8.alfresco.asie.reconcileAcsNodes.post")
public class ReconcileAcsNodesWebScript extends AbstractAsieWebScript {

    @Autowired
    private AcsReconcileService reconcileService;

    @Override
    public void executeAuthorized(WebScriptRequest request, WebScriptResponse response) throws IOException {
        final int fromDbId = this.getRequestTemplateIntegerVariable(request, "fromDbId");
        final int toDbId = this.getRequestTemplateIntegerVariable(request, "toDbId");
        final boolean reindex = Boolean.TRUE.equals(this.getOptionalQueryParameter(request, "reindex", Boolean.class));
        final boolean includeReconciled = Boolean.TRUE.equals(this.getOptionalQueryParameter(request, "includeReconciled", Boolean.class));

        final Map<String, Object> responseMap = new HashMap<>();

        ReconcileCallback callback = new ReconcileCallback() {

            @Override
            public void reconciled(long nodeDbId) {
                if (includeReconciled) {
                    @SuppressWarnings("unchecked")
                    List<Long> reconciledNodeDbIds = (List<Long>) responseMap.get("reconciled");
                    if (reconciledNodeDbIds == null)
                        responseMap.put("reconciled", reconciledNodeDbIds = new LinkedList<>());
                    reconciledNodeDbIds.add(nodeDbId);
                }
            }

            @Override
            public void unreconciled(long nodeDbId) {
                @SuppressWarnings("unchecked")
                List<Long> unreconciledNodeDbIds = (List<Long>) responseMap.get("unreconciled");
                if (unreconciledNodeDbIds == null)
                    responseMap.put("unreconciled", unreconciledNodeDbIds = new LinkedList<>());
                unreconciledNodeDbIds.add(nodeDbId);
            }

            @Override
            public void processed(long nodeDbId, Set<ShardInstance> instsReconciled, Set<ShardInstance> instsReconciling,
                    Map<ShardInstance, String> instsErrorMessages) {
                if (!instsReconciled.isEmpty()) {
                    @SuppressWarnings("unchecked")
                    Map<Long, List<String>> nodeHosts = (Map<Long, List<String>>) responseMap.get("success");
                    if (nodeHosts == null)
                        responseMap.put("success", nodeHosts = new HashMap<>());

                    List<String> instances = new LinkedList<>();
                    for (ShardInstance instance : instsReconciled)
                        instances.add(instance.getSpec());
                    nodeHosts.put(nodeDbId, instances);
                }

                if (!instsReconciling.isEmpty()) {
                    @SuppressWarnings("unchecked")
                    Map<Long, List<String>> nodeHosts = (Map<Long, List<String>>) responseMap.get("scheduled");
                    if (nodeHosts == null)
                        responseMap.put("scheduled", nodeHosts = new HashMap<>());

                    List<String> instances = new LinkedList<>();
                    // iterate the instances still reconciling, not the already reconciled ones
                    for (ShardInstance instance : instsReconciling)
                        instances.add(instance.getSpec());
                    nodeHosts.put(nodeDbId, instances);
                }

                if (!instsErrorMessages.isEmpty()) {
                    @SuppressWarnings("unchecked")
                    Map<Long, Map<String, Map<String, String>>> nodeHosts = (Map<Long, Map<String, Map<String, String>>>) responseMap.get("error");
                    if (nodeHosts == null)
                        responseMap.put("error", nodeHosts = new HashMap<>());

                    Map<String, Map<String, String>> nodeHost = new HashMap<>();
                    for (Entry<ShardInstance, String> message : instsErrorMessages.entrySet())
                        nodeHost.put(message.getKey().getSpec(), Collections.singletonMap("message", message.getValue()));
                    nodeHosts.put(nodeDbId, nodeHost);
                }
            }
        };

        try {
            this.reconcileService.reconcile(fromDbId, toDbId, null, reindex, callback, 1L, TimeUnit.HOURS, 2L, TimeUnit.MINUTES);

            if (responseMap.containsKey("error")) {
                response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
            } else if (responseMap.containsKey("scheduled")) {
                response.setStatus(HttpStatus.ACCEPTED.value());
            } else {
                response.setStatus(HttpStatus.OK.value());
            }

            response.setContentType("application/json");
            this.getObjectMapper().writeValue(response.getWriter(), responseMap);
        } catch (InterruptedException ie) {
            throw new WebScriptException(HttpStatus.SERVICE_UNAVAILABLE.value(), "The reindex was interrupted", ie);
        } catch (TimeoutException te) {
            throw new WebScriptException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "The reindex may continue, but timed-out waiting", te);
        }
    }

    private int getRequestTemplateIntegerVariable(WebScriptRequest request, String templateVariableName) {
        String str = request.getServiceMatch().getTemplateVars().get(templateVariableName);
        return Integer.valueOf(str);
    }

}
@@ -0,0 +1,24 @@
package com.inteligr8.alfresco.asie.rest;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.service.ReindexService;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component(value = "webscript.com.inteligr8.alfresco.asie.reindexAcsNode.put")
public class ReindexAcsNodeWebScript extends AbstractAcsNodeActionWebScript {

    @Autowired
    private ReindexService reindexService;

    @Override
    protected void executeAction(long nodeDbId, ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        // forward the caller-supplied timeouts instead of hard-coding them
        this.reindexService.reindex(nodeDbId, callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,24 @@
package com.inteligr8.alfresco.asie.rest;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.service.RetryService;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component(value = "webscript.com.inteligr8.alfresco.asie.retry.post")
public class RetryWebScript extends AbstractActionWebScript {

    @Autowired
    private RetryService retryService;

    @Override
    protected void executeAction(ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        // forward the caller-supplied timeouts instead of hard-coding them
        this.retryService.retry(callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,236 @@
package com.inteligr8.alfresco.asie.service;

import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardRegistry;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;

import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.spi.ActionCallback;
import com.inteligr8.alfresco.asie.util.CompositeFuture;
import com.inteligr8.alfresco.asie.util.ThrottledThreadPoolExecutor;
import com.inteligr8.solr.model.Action;
import com.inteligr8.solr.model.ActionResponse;
import com.inteligr8.solr.model.BaseResponse;

public abstract class AbstractActionService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private NamespaceService namespaceService;

    @Autowired
    private ApiService apiService;

    @Autowired
    private ExecutorManager executorManager;

    @Autowired(required = false)
    @Qualifier(Constants.QUALIFIER_ASIE)
    private ShardRegistry shardRegistry;

    @Value("${inteligr8.asie.default.concurrentQueueSize:64}")
    private int concurrentQueueSize;

    @Value("${inteligr8.asie.default.concurrency:16}")
    private int concurrency;

    protected int getConcurrency() {
        return this.concurrency;
    }

    protected int getConcurrentQueueSize() {
        return this.concurrentQueueSize;
    }

    protected abstract String getThreadNamePrefix();

    protected abstract String getActionName();

    /**
     * This method executes an action in Solr. The callback handles all the
     * returned values. This is the synchronous alternative to the other
     * `action` method.
     *
     * There are two sets of parameters regarding timeouts. The queue timeouts
     * are for how long the requesting thread should wait for a full queue to
     * open up space for new executions. The execution timeouts are for how
     * long the execution should be allowed to take once dequeued. There is no
     * timeout for how long the execution is queued.
     *
     * @param callback A callback to process multiple returned values from the action.
     * @param fullQueueTimeout A timeout for how long the calling thread should wait for space on the queue.
     * @param fullQueueUnit The time units for the `fullQueueTimeout`.
     * @param execTimeout A timeout for the elapsed time the execution should take when dequeued.
     * @param execUnit The time units for the `execTimeout`.
     * @throws TimeoutException Either the queue or execution timeout lapsed.
     * @throws InterruptedException The execution was interrupted (server shutdown).
     */
    protected void action(ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit, long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        long fullQueueExpireTimeMillis = System.currentTimeMillis() + fullQueueUnit.toMillis(fullQueueTimeout);
        Future<Void> future = this._action(callback, fullQueueExpireTimeMillis);
        try {
            future.get(execTimeout, execUnit);
        } catch (ExecutionException ee) {
            this.logger.error("Action thread failed: " + ee.getMessage(), ee);
        }
    }

||||
/**
|
||||
* This method executes an action on the specified node in Solr using its
|
||||
* ACS unique database identifier. The callback handles all the return
|
||||
* values. This is the asynchronous alternative to the other `action`
|
||||
* method.
|
||||
*
|
||||
* This method may block indefinitely when the queue is full. Once all
|
||||
* executions are queued, this will return a single future representing all
|
||||
* the executions.
|
||||
*
|
||||
* @param callback A callback to process multiple returned values from the execution.
|
||||
* @return A reference to the future or active executing task.
|
||||
* @throws InterruptedException The execution was interrupted (server shutdown).
|
||||
*/
|
||||
protected Future<Void> action(ActionCallback callback) throws InterruptedException {
|
||||
try {
|
||||
return this._action(callback, null);
|
||||
} catch (TimeoutException te) {
|
||||
throw new RuntimeException("This should never happen: " + te.getMessage(), te);
|
||||
}
|
||||
}
|
||||
|
||||
private Future<Void> _action(ActionCallback callback, Long fullQueueExpireTimeMillis) throws TimeoutException, InterruptedException {
|
||||
List<com.inteligr8.alfresco.asie.model.ShardInstance> eligibleInstances = this.findPossibleShardInstances();
|
||||
this.logger.debug("Will attempt to {} {} shard instances", this.getActionName(), eligibleInstances.size());
|
||||
|
||||
CompositeFuture<Void> future = new CompositeFuture<>();
|
||||
|
||||
ThrottledThreadPoolExecutor executor = this.executorManager.createThrottled(
|
||||
this.getThreadNamePrefix(),
|
||||
this.getConcurrency(), this.getConcurrency(), this.getConcurrentQueueSize(),
|
||||
1L, TimeUnit.MINUTES);
|
||||
|
||||
for (final com.inteligr8.alfresco.asie.model.ShardInstance instance : eligibleInstances) {
|
||||
this.logger.trace("Will attempt to {} shard instance: {}", this.getActionName(), instance);
|
||||
|
||||
Callable<Void> callable = new Callable<>() {
|
||||
@Override
|
||||
public Void call() {
|
||||
String core = instance.extractShard().getCoreName();
|
||||
SolrHost host = instance.extractNode();
|
||||
URL url = host.toUrl(apiService.isSecure() ? "https" : "http");
|
||||
CoreAdminApi api = apiService.createApi(url.toString(), CoreAdminApi.class);
|
||||
|
||||
try {
|
||||
logger.debug("Performing {} of shard instance: {}", getActionName(), instance);
|
||||
BaseResponse apiResponse = execute(api, core);
|
||||
logger.trace("Performed {} of shard instance: {}", getActionName(), instance);
|
||||
|
||||
Action action = null;
|
||||
if (apiResponse instanceof ActionCoreResponse<?>) {
|
||||
action = ((ActionCoreResponse<Action>) apiResponse).getCores().getByCore(core);
|
||||
} else if (apiResponse instanceof ActionResponse<?>) {
|
||||
action = ((ActionResponse<Action>) apiResponse).getAction();
|
||||
}
|
||||
|
||||
if (action == null) {
|
||||
callback.unknownResult(instance);
|
||||
} else {
|
||||
switch (action.getStatus()) {
|
||||
case Scheduled:
|
||||
callback.scheduled(instance);
|
||||
break;
|
||||
case Success:
|
||||
callback.success(instance);
|
||||
break;
|
||||
default:
|
||||
if (apiResponse instanceof com.inteligr8.alfresco.asie.model.BaseResponse) {
|
||||
com.inteligr8.alfresco.asie.model.BaseResponse asieResponse = (com.inteligr8.alfresco.asie.model.BaseResponse) apiResponse;
|
||||
logger.debug("Performance of {} of shard instance failed: {}: {}", getActionName(), instance, asieResponse.getException());
|
||||
callback.error(instance, asieResponse.getException());
|
||||
} else {
|
||||
logger.debug("Performance of {} of shard instance failed: {}: {}", getActionName(), instance, apiResponse.getResponseHeader().getStatus());
|
||||
callback.error(instance, String.valueOf(apiResponse.getResponseHeader().getStatus()));
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("An exception occurred", e);
|
||||
callback.error(instance, e.getMessage());
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
if (fullQueueExpireTimeMillis == null) {
|
||||
future.combine(executor.submit(callable, -1L, null));
|
||||
} else {
|
||||
future.combine(executor.submit(callable, fullQueueExpireTimeMillis - System.currentTimeMillis(), TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
return future;
|
||||
}
|
||||
|
||||
protected abstract BaseResponse execute(CoreAdminApi api, String core);
|
||||
|
||||
private List<com.inteligr8.alfresco.asie.model.ShardInstance> findPossibleShardInstances() {
|
||||
if (this.shardRegistry == null)
|
||||
throw new UnsupportedOperationException("ACS instances without a sharding configuration are not yet implemented");
|
||||
|
||||
List<com.inteligr8.alfresco.asie.model.ShardInstance> instances = new LinkedList<>();
|
||||
|
||||
for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : this.shardRegistry.getFlocs().entrySet()) {
|
||||
if (!floc.getKey().getStoreRefs().contains(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE))
|
||||
continue;
|
||||
for (Entry<Shard, Set<ShardState>> shard : floc.getValue().entrySet()) {
|
||||
for (ShardState shardState : shard.getValue())
|
||||
instances.add(this.toModel(shardState.getShardInstance(), shardState));
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.trace("Despite sharding, considering all shards and nodes: {}", instances);
|
||||
|
||||
return instances;
|
||||
}
|
||||
|
||||
private com.inteligr8.alfresco.asie.model.ShardInstance toModel(ShardInstance instance, ShardState anyShardState) {
|
||||
Floc floc = instance.getShard().getFloc();
|
||||
|
||||
ShardSet shardSet = ShardSet.from(floc, anyShardState);
|
||||
SolrHost host = SolrHost.from(instance);
|
||||
com.inteligr8.alfresco.asie.model.Shard shard = com.inteligr8.alfresco.asie.model.Shard.from(shardSet, instance.getShard().getInstance());
|
||||
return com.inteligr8.alfresco.asie.model.ShardInstance.from(shard, host);
|
||||
}
|
||||
|
||||
}
|
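As a usage sketch only: a concrete service built on this base class just names the action and maps it to a CoreAdminApi call; shard discovery, throttling, and callback dispatch come from the base. The `OptimizeRequest` model and `api.optimize(...)` call below are assumptions for illustration, not part of this changeset; FixService and RetryService later in this diff show the real pattern.

// Hypothetical sketch; OptimizeRequest and CoreAdminApi.optimize(...) are assumed, not part of this diff.
@Component
public class OptimizeService extends AbstractActionService {

    @Override
    protected String getActionName() {
        return "optimize";
    }

    @Override
    protected String getThreadNamePrefix() {
        return "solr-optimize";
    }

    @Override
    protected BaseResponse execute(CoreAdminApi api, String core) {
        // substitute a real CoreAdminApi operation here
        return api.optimize(new OptimizeRequest().withCore(core));
    }

    public Future<Void> optimize(ActionCallback callback) throws InterruptedException {
        return super.action(callback);
    }

}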
@@ -0,0 +1,268 @@
package com.inteligr8.alfresco.asie.service;

import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.index.shard.Floc;
import org.alfresco.repo.index.shard.Shard;
import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardRegistry;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;

import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.alfresco.asie.spi.ActionCallback;
import com.inteligr8.alfresco.asie.util.CompositeFuture;
import com.inteligr8.alfresco.asie.util.ThrottledThreadPoolExecutor;
import com.inteligr8.solr.model.Action;
import com.inteligr8.solr.model.ActionResponse;
import com.inteligr8.solr.model.BaseResponse;

public abstract class AbstractNodeActionService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private NamespaceService namespaceService;

    @Autowired
    private ApiService apiService;

    @Autowired
    private ExecutorManager executorManager;

    @Autowired(required = false)
    @Qualifier(Constants.QUALIFIER_ASIE)
    private ShardRegistry shardRegistry;

    @Value("${inteligr8.asie.default.concurrentQueueSize:64}")
    private int concurrentQueueSize;

    @Value("${inteligr8.asie.default.concurrency:16}")
    private int concurrency;

    protected int getConcurrency() {
        return this.concurrency;
    }

    protected int getConcurrentQueueSize() {
        return this.concurrentQueueSize;
    }

    protected abstract String getThreadNamePrefix();

    protected abstract String getActionName();

    /**
     * This method executes an action on the specified node in Solr using its
     * ACS unique database identifier. The callback handles all the return
     * values. This is the synchronous alternative to the other `action`
     * method.
     *
     * There are two sets of parameters regarding timeouts. The queue timeouts
     * are for how long the requesting thread should wait for a full queue to
     * open up space for new executions. The execution timeouts are for how
     * long the execution should be allowed to take once dequeued. There is no
     * timeout for how long the execution is queued.
     *
     * @param nodeDbId A node database ID.
     * @param callback A callback to process multiple returned values from the action.
     * @param fullQueueTimeout A timeout for how long the calling thread should wait for space on the queue.
     * @param fullQueueUnit The time units for the `fullQueueTimeout`.
     * @param execTimeout A timeout for the elapsed time the execution should take when dequeued.
     * @param execUnit The time units for the `execTimeout`.
     * @throws TimeoutException Either the queue or execution timeout lapsed.
     * @throws InterruptedException The execution was interrupted (server shutdown).
     */
    protected void action(long nodeDbId, ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit, long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        long fullQueueExpireTimeMillis = System.currentTimeMillis() + fullQueueUnit.toMillis(fullQueueTimeout);
        Future<Void> future = this._action(nodeDbId, callback, fullQueueExpireTimeMillis);
        try {
            future.get(execTimeout, execUnit);
        } catch (ExecutionException ee) {
            this.logger.error("Action thread failed: " + ee.getMessage(), ee);
        }
    }

    /**
     * This method executes an action on the specified node in Solr using its
     * ACS unique database identifier. The callback handles all the return
     * values. This is the asynchronous alternative to the other `action`
     * method.
     *
     * This method may block indefinitely when the queue is full. Once all
     * executions are queued, this will return a single future representing all
     * the executions.
     *
     * @param nodeDbId A node database ID.
     * @param callback A callback to process multiple returned values from the execution.
     * @return A reference to the future or active executing task.
     * @throws InterruptedException The execution was interrupted (server shutdown).
     */
    protected Future<Void> action(long nodeDbId, ActionCallback callback) throws InterruptedException {
        try {
            return this._action(nodeDbId, callback, null);
        } catch (TimeoutException te) {
            throw new RuntimeException("This should never happen: " + te.getMessage(), te);
        }
    }

    private Future<Void> _action(long nodeDbId, ActionCallback callback, Long fullQueueExpireTimeMillis) throws TimeoutException, InterruptedException {
        List<com.inteligr8.alfresco.asie.model.ShardInstance> eligibleInstances = this.findPossibleShardInstances(nodeDbId);
        this.logger.debug("Will attempt to {} ACS node against {} shard instances: {}", this.getActionName(), eligibleInstances.size(), nodeDbId);

        CompositeFuture<Void> future = new CompositeFuture<>();

        ThrottledThreadPoolExecutor executor = this.executorManager.createThrottled(
                this.getThreadNamePrefix(),
                this.getConcurrency(), this.getConcurrency(), this.getConcurrentQueueSize(),
                1L, TimeUnit.MINUTES);

        for (final com.inteligr8.alfresco.asie.model.ShardInstance instance : eligibleInstances) {
            this.logger.trace("Will attempt to {} ACS node against shard instance: {}: {}", this.getActionName(), nodeDbId, instance);

            Callable<Void> callable = new Callable<>() {
                @Override
                public Void call() {
                    String core = instance.extractShard().getCoreName();
                    SolrHost host = instance.extractNode();
                    URL url = host.toUrl(apiService.isSecure() ? "https" : "http");
                    CoreAdminApi api = apiService.createApi(url.toString(), CoreAdminApi.class);

                    try {
                        logger.debug("Performing {} of ACS node against shard instance: {}: {}", getActionName(), nodeDbId, instance);
                        BaseResponse apiResponse = execute(api, core, nodeDbId);
                        logger.trace("Performed {} of ACS node against shard instance: {}: {}", getActionName(), nodeDbId, instance);

                        Action action = null;
                        if (apiResponse instanceof ActionCoreResponse<?>) {
                            action = ((ActionCoreResponse<Action>) apiResponse).getCores().getByCore(core);
                        } else if (apiResponse instanceof ActionResponse<?>) {
                            action = ((ActionResponse<Action>) apiResponse).getAction();
                        }

                        if (action == null) {
                            callback.unknownResult(instance);
                        } else {
                            switch (action.getStatus()) {
                                case Scheduled:
                                    callback.scheduled(instance);
                                    break;
                                case Success:
                                    callback.success(instance);
                                    break;
                                default:
                                    if (apiResponse instanceof com.inteligr8.alfresco.asie.model.BaseResponse) {
                                        com.inteligr8.alfresco.asie.model.BaseResponse asieResponse = (com.inteligr8.alfresco.asie.model.BaseResponse) apiResponse;
                                        logger.debug("Performance of {} of ACS node against shard instance failed: {}: {}: {}", getActionName(), nodeDbId, instance, asieResponse.getException());
                                        callback.error(instance, asieResponse.getException());
                                    } else {
                                        logger.debug("Performance of {} of ACS node against shard instance failed: {}: {}: {}", getActionName(), nodeDbId, instance, apiResponse.getResponseHeader().getStatus());
                                        callback.error(instance, String.valueOf(apiResponse.getResponseHeader().getStatus()));
                                    }
                            }
                        }
                    } catch (Exception e) {
                        logger.error("An exception occurred", e);
                        callback.error(instance, e.getMessage());
                    }

                    return null;
                }
            };

            if (fullQueueExpireTimeMillis == null) {
                future.combine(executor.submit(callable, -1L, null));
            } else {
                future.combine(executor.submit(callable, fullQueueExpireTimeMillis - System.currentTimeMillis(), TimeUnit.MILLISECONDS));
            }
        }

        return future;
    }

    protected abstract BaseResponse execute(CoreAdminApi api, String core, long nodeDbId);

    private List<com.inteligr8.alfresco.asie.model.ShardInstance> findPossibleShardInstances(long nodeDbId) {
        if (this.shardRegistry == null)
            throw new UnsupportedOperationException("ACS instances without a sharding configuration are not yet implemented");

        SearchParameters searchParams = new SearchParameters();
        searchParams.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
        searchParams.setQuery("@" + this.formatForFts(ContentModel.PROP_NODE_DBID) + ":" + nodeDbId);

        List<com.inteligr8.alfresco.asie.model.ShardInstance> instances = new LinkedList<>();

        List<ShardInstance> slicedInstances = this.shardRegistry.getIndexSlice(searchParams);
        if (slicedInstances != null) {
            this.logger.trace("Due to a sharding method, considering only applicable shards and their ASIE nodes: {}: {}", nodeDbId, slicedInstances);

            for (ShardInstance instance : slicedInstances)
                instances.add(this.toModel(instance));
        } else {
            for (Entry<Floc, Map<Shard, Set<ShardState>>> floc : this.shardRegistry.getFlocs().entrySet()) {
                if (!floc.getKey().getStoreRefs().contains(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE))
                    continue;
                for (Entry<Shard, Set<ShardState>> shard : floc.getValue().entrySet()) {
                    for (ShardState shardState : shard.getValue())
                        instances.add(this.toModel(shardState.getShardInstance(), shardState));
                }
            }

            this.logger.trace("Despite sharding, considering all shards and nodes: {}: {}", nodeDbId, instances);
        }

        return instances;
    }

    private com.inteligr8.alfresco.asie.model.ShardInstance toModel(ShardInstance instance) {
        // grab any shard state for this shard; which one does not matter here
        Floc floc = instance.getShard().getFloc();
        Map<Shard, Set<ShardState>> shardsStates = this.shardRegistry.getFlocs().get(floc);
        if (shardsStates == null)
            throw new IllegalStateException();
        Set<ShardState> shardStates = shardsStates.get(instance.getShard());
        if (shardStates == null || shardStates.isEmpty())
            throw new IllegalStateException();
        ShardState anyShardState = shardStates.iterator().next();

        return this.toModel(instance, anyShardState);
    }

    private com.inteligr8.alfresco.asie.model.ShardInstance toModel(ShardInstance instance, ShardState anyShardState) {
        Floc floc = instance.getShard().getFloc();

        ShardSet shardSet = ShardSet.from(floc, anyShardState);
        SolrHost host = SolrHost.from(instance);
        com.inteligr8.alfresco.asie.model.Shard shard = com.inteligr8.alfresco.asie.model.Shard.from(shardSet, instance.getShard().getInstance());
        return com.inteligr8.alfresco.asie.model.ShardInstance.from(shard, host);
    }

    private String formatForFts(QName qname) {
        return qname.toPrefixString(this.namespaceService).replace("-", "\\-").replace(":", "\\:");
    }

}
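For orientation, `findPossibleShardInstances(nodeDbId)` above narrows the candidate shards with an exact-match FTS query on the node's database ID and lets the ShardRegistry compute the index slice. A minimal sketch of the query it builds, assuming `ContentModel.PROP_NODE_DBID` resolves to the prefixed form `sys:node-dbid` (an assumption; the prefix mapping is not stated in this diff):

// illustrative only; the escaped prefix form is an assumption
SearchParameters searchParams = new SearchParameters();
searchParams.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
searchParams.setQuery("@sys\\:node\\-dbid:12345");
// equivalent to: "@" + formatForFts(ContentModel.PROP_NODE_DBID) + ":" + nodeDbId
List<ShardInstance> slice = shardRegistry.getIndexSlice(searchParams);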
@@ -0,0 +1,309 @@
package com.inteligr8.alfresco.asie.service;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.QueryConsistency;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.collections4.SetUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.model.ShardInstance;
import com.inteligr8.alfresco.asie.spi.ReconcileCallback;
import com.inteligr8.alfresco.asie.spi.ReindexCallback;
import com.inteligr8.alfresco.asie.util.CompositeFuture;
import com.inteligr8.alfresco.asie.util.ThrottledThreadPoolExecutor;

@Component
public class AcsReconcileService implements InitializingBean, DisposableBean {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    private final Logger reconcileLogger = LoggerFactory.getLogger("inteligr8.asie.reconcile");
    private final Set<QName> ignoreNodesWithAspects = SetUtils.unmodifiableSet(
            RenditionModel.ASPECT_RENDITION,
            RenditionModel.ASPECT_RENDITION2);

    @Autowired
    private NodeService nodeService;

    @Autowired
    private NamespaceService namespaceService;

    @Autowired
    private SearchService searchService;

    @Autowired
    private ReindexService reindexService;

    @Value("${inteligr8.asie.reconciliation.nodesChunkSize:250}")
    private int nodesChunkSize;

    @Value("${inteligr8.asie.reconciliation.nodeTimeoutSeconds:10}")
    private int nodeTimeoutSeconds;

    @Value("${inteligr8.asie.reconciliation.concurrentQueueSize:64}")
    private int concurrentQueueSize;

    @Value("${inteligr8.asie.reconciliation.concurrency:2}")
    private int concurrency;

    private ThrottledThreadPoolExecutor executor;

    @Override
    public void afterPropertiesSet() {
        this.executor = new ThrottledThreadPoolExecutor(this.concurrency, this.concurrency, this.concurrentQueueSize, 1L, TimeUnit.MINUTES, "solr-reconcile");
        this.executor.prestartAllCoreThreads();
    }

    @Override
    public void destroy() {
        this.executor.shutdown();
    }

    /**
     * This method reconciles the specified node range between ACS and Solr.
     * The node range is specified using the ACS unique database identifiers;
     * there is no other reasonably efficient way to walk the repository. The
     * callback handles all the return values. This is the synchronous
     * alternative to the other `reconcile` method.
     *
     * There are two sets of parameters regarding timeouts. The queue timeouts
     * are for how long the requesting thread should wait for a full queue to
     * open up space for new re-index executions. The execution timeouts are
     * for how long the execution should be allowed to take once dequeued.
     * There is no timeout for how long the execution is queued.
     *
     * @param fromDbId A node database ID, inclusive.
     * @param toDbId A node database ID, exclusive.
     * @param nodesChunkSize The number of node database IDs to query per chunk; `null` to use the configured default.
     * @param reindexUnreconciled For nodes not found in Solr, attempt to re-index against all applicable Solr instances.
     * @param callback A callback to process multiple returned values from the re-index.
     * @param queueTimeout A timeout for how long the calling thread should wait for space on the queue.
     * @param queueUnit The time units for the `queueTimeout`.
     * @param execTimeout A timeout for the elapsed time the reindex execution should take when dequeued.
     * @param execUnit The time units for the `execTimeout`.
     * @throws TimeoutException Either the queue or execution timeout lapsed.
     * @throws InterruptedException The re-index was interrupted (server shutdown).
     */
    public void reconcile(
            long fromDbId, long toDbId, Integer nodesChunkSize,
            boolean reindexUnreconciled,
            ReconcileCallback callback,
            long queueTimeout, TimeUnit queueUnit,
            long execTimeout, TimeUnit execUnit) throws InterruptedException, TimeoutException {
        if (nodesChunkSize == null)
            nodesChunkSize = this.nodesChunkSize;
        if (this.logger.isTraceEnabled())
            this.logger.trace("reconcile({}, {}, {}, {}, {}, {})", fromDbId, toDbId, nodesChunkSize, reindexUnreconciled, queueUnit.toMillis(queueTimeout), execUnit.toMillis(execTimeout));

        CompositeFuture<Void> future = new CompositeFuture<>();

        for (long startDbId = fromDbId; startDbId < toDbId; startDbId += nodesChunkSize) {
            long endDbId = Math.min(toDbId, startDbId + nodesChunkSize);
            future.combine(this.reconcileChunk(startDbId, endDbId, reindexUnreconciled, callback, queueTimeout, queueUnit, execTimeout, execUnit));
            future.purge(true);
        }

        try {
            future.get(execTimeout, execUnit);
        } catch (ExecutionException ee) {
            this.logger.error("Reconciliation thread failed: " + ee.getMessage(), ee);
        }
    }

    public Future<Void> reconcile(
            long fromDbId, long toDbId, Integer nodesChunkSize,
            boolean reindexUnreconciled,
            ReconcileCallback callback) throws InterruptedException {
        if (nodesChunkSize == null)
            nodesChunkSize = this.nodesChunkSize;
        this.logger.trace("reconcile({}, {}, {}, {})", fromDbId, toDbId, nodesChunkSize, reindexUnreconciled);

        CompositeFuture<Void> future = new CompositeFuture<>();

        try {
            for (long startDbId = fromDbId; startDbId < toDbId; startDbId += nodesChunkSize) {
                long endDbId = Math.min(toDbId, startDbId + nodesChunkSize);
                future.combine(this.reconcileChunk(startDbId, endDbId, reindexUnreconciled, callback, -1L, null, -1L, null));
                future.purge(true);
            }
        } catch (TimeoutException te) {
            throw new RuntimeException("This should never happen: " + te.getMessage(), te);
        }

        return future;
    }

    protected Future<Void> reconcileChunk(
            long fromDbId, long toDbId,
            boolean reindexUnreconciled,
            ReconcileCallback callback,
            long queueTimeout, TimeUnit queueUnit,
            long execTimeout, TimeUnit execUnit) throws InterruptedException, TimeoutException {
        if (this.logger.isTraceEnabled())
            this.logger.trace("reconcileChunk({}, {}, {}, {}, {})", fromDbId, toDbId, reindexUnreconciled, queueUnit.toMillis(queueTimeout), execUnit.toMillis(execTimeout));

        int dbIdCount = (int) (toDbId - fromDbId);

        SearchParameters searchParams = new SearchParameters();
        searchParams.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
        searchParams.setQueryConsistency(QueryConsistency.EVENTUAL); // force Solr
        searchParams.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
        searchParams.setQuery("@" + this.formatForFts(ContentModel.PROP_NODE_DBID) + ":[" + fromDbId + " TO " + toDbId + ">");
        searchParams.setMaxItems(dbIdCount);
        searchParams.setBulkFetchEnabled(false);
        searchParams.setIncludeMetadata(false);

        // indexed by (dbId - fromDbId); a non-null entry means the DB ID was found in Solr,
        // a null entry means it is unknown or not indexed
        NodeRef[] nodeRefs = new NodeRef[dbIdCount];

        this.logger.trace("Querying for nodes in chunk: {}", searchParams.getQuery());
        ResultSet nodes = this.searchService.query(searchParams);
        this.logger.debug("Found {} of {} possible nodes in chunk: {}-{}", nodes.getNumberFound(), dbIdCount, fromDbId, toDbId);
        for (NodeRef nodeRef : nodes.getNodeRefs()) {
            Status nodeStatus = this.nodeService.getNodeStatus(nodeRef);
            long nodeDbId = nodeStatus.getDbId();
            if (nodeDbId < fromDbId || nodeDbId >= toDbId) {
                this.logger.warn("An unexpected DB ID was included in the result set; ignoring: {} != [{}, {})", nodeDbId, fromDbId, toDbId);
                continue;
            }

            int dbIdIndex = (int) (nodeDbId - fromDbId);
            nodeRefs[dbIdIndex] = nodeRef;
        }

        CompositeFuture<Void> future = new CompositeFuture<>();

        for (long _nodeDbId = fromDbId; _nodeDbId < toDbId; _nodeDbId++) {
            final long nodeDbId = _nodeDbId;
            this.logger.trace("Attempting to reconcile ACS node: {}", nodeDbId);

            final int dbIdIndex = (int) (nodeDbId - fromDbId);
            if (nodeRefs[dbIdIndex] != null) {
                this.logger.trace("A node in the DB is already indexed in Solr: {}: {}", nodeDbId, nodeRefs[dbIdIndex]);
                this.reconcileLogger.info("RECONCILED: {} <=> {}", nodeDbId, nodeRefs[dbIdIndex]);
                callback.reconciled(nodeDbId);
                continue;
            }

            Callable<Void> callable = new Callable<Void>() {
                @Override
                public Void call() throws InterruptedException, TimeoutException {
                    reconcile(nodeDbId, reindexUnreconciled, callback, execTimeout, execUnit);
                    return null;
                }
            };

            if (queueTimeout < 0L) {
                future.combine(this.executor.submit(callable, -1L, null));
            } else {
                future.combine(this.executor.submit(callable, queueTimeout, queueUnit));
            }
        }

        return future;
    }

    public void reconcile(long nodeDbId,
            boolean reindexUnreconciled,
            ReconcileCallback callback,
            long execTimeout, TimeUnit execUnit) throws InterruptedException, TimeoutException {
        NodeRef nodeRef = this.nodeService.getNodeRef(nodeDbId);
        if (nodeRef == null) {
            this.logger.trace("No such ACS node: {}; skipping ...", nodeDbId);
            return;
        }

        if (!StoreRef.STORE_REF_WORKSPACE_SPACESSTORE.equals(nodeRef.getStoreRef())) {
            this.logger.trace("A deliberately ignored store in the DB is not indexed in Solr: {}: {}", nodeDbId, nodeRef);
            return;
        }

        Set<QName> aspects = this.nodeService.getAspects(nodeRef);
        aspects.retainAll(this.ignoreNodesWithAspects);
        if (!aspects.isEmpty()) {
            this.logger.trace("A deliberately ignored node in the DB is not indexed in Solr: {}: {}: {}", nodeDbId, nodeRef, aspects);
            return;
        }

        if (!reindexUnreconciled) {
            this.logger.debug("A node in the DB is not indexed in Solr: {}: {}", nodeDbId, nodeRef);
            this.reconcileLogger.info("UNRECONCILED: {} <=> {}", nodeDbId, nodeRef);
            callback.unreconciled(nodeDbId);
        } else {
            logger.debug("A node in the DB is not indexed in Solr; attempting to reindex: {}: {}", nodeDbId, nodeRef);
            this.reindex(nodeDbId, nodeRef, callback, execTimeout, execUnit);
        }
    }

    public void reindex(long nodeDbId, NodeRef nodeRef,
            ReconcileCallback callback,
            long execTimeout, TimeUnit execUnit) throws InterruptedException, TimeoutException {
        Set<ShardInstance> syncHosts = new HashSet<>();
        Set<ShardInstance> asyncHosts = new HashSet<>();
        Map<ShardInstance, String> errorHosts = new HashMap<>();

        ReindexCallback reindexCallback = new ReindexCallback() {

            @Override
            public void success(ShardInstance instance) {
                reconcileLogger.info("REINDEXED: {} <=> {}", nodeDbId, nodeRef);
                syncHosts.add(instance);
            }

            @Override
            public void scheduled(ShardInstance instance) {
                reconcileLogger.info("REINDEXING: {} <=> {}", nodeDbId, nodeRef);
                asyncHosts.add(instance);
            }

            @Override
            public void error(ShardInstance instance, String message) {
                reconcileLogger.info("UNINDEXED: {} <=> {}", nodeDbId, nodeRef);
                errorHosts.put(instance, message);
            }
        };

        try {
            if (execTimeout < 0L) {
                this.reindexService.reindex(nodeDbId, reindexCallback).get();
            } else {
                this.reindexService.reindex(nodeDbId, reindexCallback).get(execTimeout, execUnit);
            }
        } catch (ExecutionException ee) {
            throw new RuntimeException("An unexpected exception occurred: " + ee.getMessage(), ee);
        }

        if (callback != null)
            callback.processed(nodeDbId, syncHosts, asyncHosts, errorHosts);
    }

    private String formatForFts(QName qname) {
        return qname.toPrefixString(this.namespaceService).replace("-", "\\-").replace(":", "\\:");
    }

}
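A caller-side sketch, not part of this changeset, of driving a reconciliation run over a DB ID range with a tallying ReconcileCallback. The bean wiring, the ID range, and the timeouts are illustrative, and imports are omitted for brevity.

// Hypothetical caller; assumes AtomicLong, Set, Map, TimeUnit, etc. are imported.
@Component
public class NightlyReconcileJob {

    @Autowired
    private AcsReconcileService reconcileService;

    public void run() throws InterruptedException, TimeoutException {
        AtomicLong unindexed = new AtomicLong();

        ReconcileCallback callback = new ReconcileCallback() {
            @Override
            public void reconciled(long nodeDbId) {
                // present in both the database and Solr; nothing to do
            }

            @Override
            public void unreconciled(long nodeDbId) {
                unindexed.incrementAndGet();
            }

            @Override
            public void processed(long nodeDbId, Set<ShardInstance> reconciled,
                    Set<ShardInstance> reconciling, Map<ShardInstance, String> errors) {
                // a re-index was attempted for this node; inspect per-shard-instance outcomes here
            }
        };

        // reconcile DB IDs [1, 100000) using the configured chunk size, re-indexing anything missing
        this.reconcileService.reconcile(1L, 100000L, null, true, callback,
                10L, TimeUnit.SECONDS, 30L, TimeUnit.SECONDS);
    }

}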
@@ -0,0 +1,118 @@
package com.inteligr8.alfresco.asie.service;

import java.io.IOException;
import java.net.URL;

import org.alfresco.repo.index.shard.ShardInstance;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.model.SolrHost;
import com.inteligr8.rs.AuthorizationFilter;
import com.inteligr8.rs.Client;
import com.inteligr8.rs.ClientCxfConfiguration;
import com.inteligr8.rs.ClientCxfImpl;

import jakarta.ws.rs.client.ClientRequestContext;

@Component
public class ApiService implements InitializingBean {

    public static final String SOLR_CORE = "alfresco";
    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Value("${solr.secureComms}")
    private String solrSecureComms;

    @Value("${solr.port}")
    private int solrPort;

    @Value("${solr.port.ssl}")
    private int solrSslPort;

    @Value("${solr.sharedSecret.header}")
    private String solrSharedSecretHeader;

    @Value("${solr.sharedSecret}")
    private String solrSharedSecret;

    @Value("${inteligr8.asie.basePath}")
    private String solrBaseUrl;

    @Value("${inteligr8.asie.reconciliation.nodesChunkSize:250}")
    private int nodesChunkSize;

    @Override
    public void afterPropertiesSet() throws Exception {
        this.solrSharedSecret = StringUtils.trimToNull(this.solrSharedSecret);
    }

    public <T> T createApi(String hostname, int port, Class<T> apiClass) {
        String solrBaseUrl = this.formulateSolrBaseUrl(hostname, port);
        this.logger.trace("Using Solr base URL: {}", solrBaseUrl);
        return this.createApi(solrBaseUrl, apiClass);
    }

    public <T> T createApi(ShardInstance instance, Class<T> apiClass) {
        URL url = SolrHost.from(instance).toUrl("http");
        return this.createApi(url.toString(), apiClass);
    }

    public <T> T createApi(String solrBaseUrl, Class<T> apiClass) {
        Client solrClient = this.createClient(solrBaseUrl);
        return this.getApi(solrClient, apiClass);
    }

    public <T> T getApi(Client solrClient, Class<T> apiClass) {
        return solrClient.getApi(apiClass);
    }

    public boolean isSecure() {
        return "https".equals(this.solrSecureComms);
    }

    public int getDefaultSolrPort() {
        return this.isSecure() ? this.solrSslPort : this.solrPort;
    }

    public String formulateSolrBaseUrl(String hostname, Integer port) {
        boolean isSsl = "https".equals(this.solrSecureComms);
        StringBuilder baseUrl = new StringBuilder(isSsl ? "https" : "http").append("://").append(hostname);
        baseUrl.append(':').append(port == null ? (isSsl ? this.solrSslPort : this.solrPort) : port);
        baseUrl.append(this.solrBaseUrl);
        return baseUrl.toString();
    }

    public Client createClient(final String baseUrl) {
        ClientCxfImpl client = new ClientCxfImpl(new ClientCxfConfiguration() {
            @Override
            public String getBaseUrl() {
                return baseUrl;
            }

            @Override
            public AuthorizationFilter createAuthorizationFilter() {
                return solrSharedSecret == null ? null : new AuthorizationFilter() {
                    @Override
                    public void filter(ClientRequestContext requestContext) throws IOException {
                        logger.trace("Adding authorization headers for ASIE shared auth: {}", solrSharedSecretHeader);
                        requestContext.getHeaders().putSingle(solrSharedSecretHeader, solrSharedSecret);
                    }
                };
            }

            @Override
            public boolean isDefaultBusEnabled() {
                return false;
            }
        });

        client.register();
        return client;
    }

}
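A small usage sketch, assuming illustrative property values of solr.secureComms=none, solr.port=8983, and inteligr8.asie.basePath=/solr (none of which are specified by this diff):

// would yield "http://search01.example.com:8983/solr" under the illustrative values above
String baseUrl = apiService.formulateSolrBaseUrl("search01.example.com", null);

// build a Jakarta RS proxy for the Solr core admin endpoints behind that base URL
CoreAdminApi api = apiService.createApi("search01.example.com", 8983, CoreAdminApi.class);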
@@ -0,0 +1,138 @@
package com.inteligr8.alfresco.asie.service;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.TimeUnit;

import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.inteligr8.alfresco.asie.util.ThrottledThreadPoolExecutor;

/**
 * This class manages the instantiation and shutdown of `ExecutorService`
 * instances that are rarely used.
 *
 * The default `ExecutorService` implementations do not support 0 core threads
 * without undesirable side effects, so a pool holds its threads and could hold
 * them for weeks without being used.
 *
 * This manager shuts down and dereferences the whole `ExecutorService` once
 * there are no other hard references to it and the `keepAliveTime` expires.
 */
@Component
public class ExecutorManager implements InitializingBean, DisposableBean, RemovalListener<String, ExecutorService> {

    @Value("${inteligr8.asie.executors.expireTimeInMinutes:30}")
    private int expireTimeInMinutes;

    private Cache<String, ExecutorService> refCache;
    private Cache<String, ExecutorService> expiringCache;

    @Override
    public void afterPropertiesSet() throws Exception {
        // a weakly-held value is evicted once the executor is no longer referenced elsewhere;
        // the possible references are the caller temporarily using it and the `expiringCache` below (until it expires);
        // this keeps the pool from being shut down after it expires while the caller is still referencing it;
        // ultimately, if it is cached at all, it is in this cache and MAY also be in the `expiringCache`
        this.refCache = CacheBuilder.newBuilder()
                .initialCapacity(8)
                .weakValues()
                .removalListener(this)
                .build();

        this.expiringCache = CacheBuilder.newBuilder()
                .initialCapacity(8)
                .expireAfterAccess(this.expireTimeInMinutes, TimeUnit.MINUTES)
                .build();
    }

    @Override
    public void destroy() throws Exception {
        this.refCache.invalidateAll();
        this.refCache.cleanUp();
        this.expiringCache.invalidateAll();
        this.expiringCache.cleanUp();
    }

    @Override
    public void onRemoval(RemovalNotification<String, ExecutorService> notification) {
        notification.getValue().shutdown();
    }

    public ThrottledThreadPoolExecutor createThrottled(
            String name,
            int coreThreadPoolSize,
            int maximumThreadPoolSize,
            int maximumQueueSize,
            long keepAliveTime,
            TimeUnit unit) {
        return this.createThrottled(name, coreThreadPoolSize, maximumThreadPoolSize, maximumQueueSize, keepAliveTime, unit, null);
    }

    public ThrottledThreadPoolExecutor createThrottled(
            final String name,
            final int coreThreadPoolSize,
            final int maximumThreadPoolSize,
            final int maximumQueueSize,
            final long keepAliveTime,
            final TimeUnit unit,
            final RejectedExecutionHandler rejectedExecutionHandler) {
        try {
            // if it is already cached, reuse the cached executor; otherwise create one
            final ExecutorService executor = this.refCache.get(name, new Callable<ThrottledThreadPoolExecutor>() {
                @Override
                public ThrottledThreadPoolExecutor call() {
                    ThrottledThreadPoolExecutor executor = null;
                    if (rejectedExecutionHandler == null) {
                        executor = new ThrottledThreadPoolExecutor(coreThreadPoolSize, maximumThreadPoolSize, maximumQueueSize,
                                keepAliveTime, unit,
                                name);
                    } else {
                        executor = new ThrottledThreadPoolExecutor(coreThreadPoolSize, maximumThreadPoolSize, maximumQueueSize,
                                keepAliveTime, unit,
                                name,
                                rejectedExecutionHandler);
                    }

                    executor.prestartAllCoreThreads();
                    return executor;
                }
            });

            return (ThrottledThreadPoolExecutor) this.expiringCache.get(name, new Callable<ExecutorService>() {
                @Override
                public ExecutorService call() throws Exception {
                    return executor;
                }
            });
        } catch (ExecutionException ee) {
            throw new RuntimeException("This should never happen", ee);
        }
    }

    public ExecutorService get(String name) {
        // check the expiring cache first, so the access refreshes its expiration timer
        ExecutorService executor = this.expiringCache.getIfPresent(name);
        if (executor != null)
            return executor;

        executor = this.refCache.getIfPresent(name);
        if (executor == null)
            return null;

        // the executor expired, but it was still referenced by the caller
        // re-cache it
        this.expiringCache.put(name, executor);
        return executor;
    }

}
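A usage sketch of the pool manager: pools are created or reused by name, callers that still hold a reference keep a pool alive through the weak-valued cache, and idle pools are shut down once the access expiration lapses. The task below is a placeholder.

ThrottledThreadPoolExecutor executor = executorManager.createThrottled(
        "solr-reindex", 16, 16, 64, 1L, TimeUnit.MINUTES);

Callable<Void> task = () -> null; // placeholder work
executor.submit(task, 10L, TimeUnit.SECONDS); // wait up to 10s for queue space

// later lookups by name return the same pool and refresh its idle-expiration timer
ExecutorService samePool = executorManager.get("solr-reindex");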
@@ -0,0 +1,43 @@
package com.inteligr8.alfresco.asie.service;

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.core.FixAction;
import com.inteligr8.alfresco.asie.model.core.FixRequest;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component
public class FixService extends AbstractActionService {

    @Override
    protected String getActionName() {
        return "fix";
    }

    @Override
    protected String getThreadNamePrefix() {
        return "solr-fix";
    }

    @Override
    protected ActionCoreResponse<FixAction> execute(CoreAdminApi api, String core) {
        FixRequest apiRequest = new FixRequest().withCore(core);
        return api.fix(apiRequest);
    }

    public Future<Void> fix(ActionCallback callback) throws InterruptedException {
        return super.action(callback);
    }

    public void fix(ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        super.action(callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,43 @@
package com.inteligr8.alfresco.asie.service;

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.core.PurgeRequest;
import com.inteligr8.alfresco.asie.spi.ActionCallback;
import com.inteligr8.solr.model.Action;

@Component
public class PurgeService extends AbstractNodeActionService {

    @Override
    protected String getActionName() {
        return "purge";
    }

    @Override
    protected String getThreadNamePrefix() {
        return "solr-purge";
    }

    @Override
    protected ActionCoreResponse<Action> execute(CoreAdminApi api, String core, long nodeDbId) {
        PurgeRequest apiRequest = new PurgeRequest().withCore(core).withNodeId(nodeDbId);
        return api.purge(apiRequest);
    }

    public Future<Void> purge(long nodeDbId, ActionCallback callback) throws InterruptedException {
        return super.action(nodeDbId, callback);
    }

    public void purge(long nodeDbId, ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        super.action(nodeDbId, callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,43 @@
package com.inteligr8.alfresco.asie.service;

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.core.ReindexRequest;
import com.inteligr8.alfresco.asie.spi.ActionCallback;
import com.inteligr8.solr.model.Action;

@Component
public class ReindexService extends AbstractNodeActionService {

    @Override
    protected String getActionName() {
        return "re-index";
    }

    @Override
    protected String getThreadNamePrefix() {
        return "solr-reindex";
    }

    @Override
    protected ActionCoreResponse<Action> execute(CoreAdminApi api, String core, long nodeDbId) {
        ReindexRequest apiRequest = new ReindexRequest().withCore(core).withNodeId(nodeDbId);
        return api.reindex(apiRequest);
    }

    public Future<Void> reindex(long nodeDbId, ActionCallback callback) throws InterruptedException {
        return super.action(nodeDbId, callback);
    }

    public void reindex(long nodeDbId, ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        super.action(nodeDbId, callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
@@ -0,0 +1,43 @@
package com.inteligr8.alfresco.asie.service;

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.api.CoreAdminApi;
import com.inteligr8.alfresco.asie.model.ActionCoreResponse;
import com.inteligr8.alfresco.asie.model.core.RetryAction;
import com.inteligr8.alfresco.asie.model.core.RetryRequest;
import com.inteligr8.alfresco.asie.spi.ActionCallback;

@Component
public class RetryService extends AbstractActionService {

    @Override
    protected String getActionName() {
        return "retry";
    }

    @Override
    protected String getThreadNamePrefix() {
        return "solr-retry";
    }

    @Override
    protected ActionCoreResponse<RetryAction> execute(CoreAdminApi api, String core) {
        RetryRequest apiRequest = new RetryRequest().withCore(core);
        return api.retry(apiRequest);
    }

    public Future<Void> retry(ActionCallback callback) throws InterruptedException {
        return super.action(callback);
    }

    public void retry(ActionCallback callback, long fullQueueTimeout, TimeUnit fullQueueUnit,
            long execTimeout, TimeUnit execUnit) throws TimeoutException, InterruptedException {
        super.action(callback, fullQueueTimeout, fullQueueUnit, execTimeout, execUnit);
    }

}
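All four services expose the same pair of entry points inherited from the action base classes: an asynchronous form that returns a composite Future, and a synchronous form that bounds both the queue wait and the execution. A caller-side sketch using RetryService, assuming a `callback` like the one shown after the ActionCallback interface below; the timeout values are illustrative.

// asynchronous: queue the retry against every applicable core and wait on the future yourself
Future<Void> future = retryService.retry(callback);
future.get(30L, TimeUnit.SECONDS);

// synchronous: wait up to 10s for queue space and up to 30s for the executions to finish
retryService.retry(callback, 10L, TimeUnit.SECONDS, 30L, TimeUnit.SECONDS);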
@@ -1,7 +1,5 @@
package com.inteligr8.alfresco.asie.service;

import java.io.Serializable;

import org.alfresco.service.cmr.attributes.AttributeService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -11,6 +9,7 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.model.PersistedNode;
import com.inteligr8.alfresco.asie.model.ShardSet;
import com.inteligr8.alfresco.asie.model.SolrHost;

@@ -31,13 +30,13 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
        String shardKey = shardSet.getCore() + "-" + shardId;

        PersistedNode backupNode = (PersistedNode) this.attributeService.getAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
        this.logger.debug("Found backup node: {}", backupNode);
        logger.debug("Found backup node: {}", backupNode);

        if (backupNode == null || backupNode.isExpired()) {
            backupNode = new PersistedNode(node);
            backupNode = new PersistedNode(node, this.persistTimeMinutes);
            this.attributeService.setAttribute(backupNode, Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
        }


        return backupNode.getNode();
    }

@@ -49,38 +48,5 @@ public class ShardBackupService implements com.inteligr8.alfresco.asie.spi.Shard
        String shardKey = shardSet.getCore() + "-" + shardId;
        this.attributeService.removeAttribute(Constants.ATTR_ASIE, ATTR_BACKUP_NODE, shardKey);
    }



    private class PersistedNode implements Serializable {

        private static final long serialVersionUID = 4105196543023419818L;

        private final SolrHost node;
        private long expireTimeMillis;

        PersistedNode(SolrHost node) {
            this.node = node;
            this.reset();
        }

        void reset() {
            this.expireTimeMillis = System.currentTimeMillis() + persistTimeMinutes * 60L * 1000L;
        }

        boolean isExpired() {
            return this.expireTimeMillis < System.currentTimeMillis();
        }

        SolrHost getNode() {
            return this.node;
        }

        @Override
        public String toString() {
            return "node: " + this.node + "; expires in: " + (System.currentTimeMillis() - this.expireTimeMillis) + " ms";
        }

    }

}

@@ -0,0 +1,15 @@
package com.inteligr8.alfresco.asie.spi;

import com.inteligr8.alfresco.asie.model.ShardInstance;

public interface ActionCallback {

    void success(ShardInstance instance);

    void scheduled(ShardInstance instance);

    void error(ShardInstance instance, String message);

    void unknownResult(ShardInstance instance);

}
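A minimal sketch of an ActionCallback implementation that tallies per-shard-instance outcomes; thread-safe collections are used because the callback may be invoked from multiple executor threads. This class is illustrative only and imports are omitted for brevity.

// Hypothetical example; assumes AtomicInteger, ConcurrentHashMap, and ConcurrentMap are imported.
public class CountingActionCallback implements ActionCallback {

    private final AtomicInteger successes = new AtomicInteger();
    private final AtomicInteger scheduled = new AtomicInteger();
    private final ConcurrentMap<ShardInstance, String> errors = new ConcurrentHashMap<>();

    @Override
    public void success(ShardInstance instance) {
        this.successes.incrementAndGet();
    }

    @Override
    public void scheduled(ShardInstance instance) {
        this.scheduled.incrementAndGet();
    }

    @Override
    public void error(ShardInstance instance, String message) {
        this.errors.put(instance, message == null ? "unknown error" : message);
    }

    @Override
    public void unknownResult(ShardInstance instance) {
        this.errors.put(instance, "unknown result");
    }

}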
@@ -0,0 +1,19 @@
package com.inteligr8.alfresco.asie.spi;

import java.util.Map;
import java.util.Set;

import com.inteligr8.alfresco.asie.model.ShardInstance;

public interface ReconcileCallback {

    void reconciled(long nodeDbId);

    void unreconciled(long nodeDbId);

    void processed(long nodeDbId,
            Set<ShardInstance> instsReconciled,
            Set<ShardInstance> instsReconciling,
            Map<ShardInstance, String> instsErrorMessages);

}
@@ -0,0 +1,12 @@
package com.inteligr8.alfresco.asie.spi;

import com.inteligr8.alfresco.asie.model.ShardInstance;

public interface ReindexCallback extends ActionCallback {

    @Override
    default void unknownResult(ShardInstance instance) {
        throw new IllegalStateException();
    }

}
@@ -0,0 +1,150 @@
package com.inteligr8.alfresco.asie.util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.commons.lang3.tuple.MutablePair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CompositeFuture<T> implements Future<T> {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    private final Collection<Future<T>> futures = new ConcurrentLinkedQueue<>();

    public MutablePair<Integer, Integer> counts() {
        MutablePair<Integer, Integer> counts = MutablePair.of(0, 0);
        for (Future<T> future : this.futures) {
            if (future.isDone()) {
                counts.setRight(counts.getRight().intValue() + 1);
            } else {
                counts.setLeft(counts.getLeft().intValue() + 1);
            }
        }

        return counts;
    }

    public int countIncomplete() {
        return this.counts().getLeft().intValue();
    }

    public int countCompleted() {
        return this.counts().getRight().intValue();
    }

    public void combine(Future<T> future) {
        this.futures.add(future);
    }

    public void combine(Collection<Future<T>> futures) {
        this.futures.addAll(futures);
    }

    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        boolean cancelled = true;
        for (Future<T> future : this.futures)
            if (!future.cancel(mayInterruptIfRunning))
                cancelled = false;

        return cancelled;
    }

    public List<T> getList() throws InterruptedException, ExecutionException {
        List<T> results = new ArrayList<>(this.futures.size());
        for (Future<T> future : this.futures)
            results.add(future.get());

        return results;
    }

    @Override
    public T get() throws InterruptedException, ExecutionException {
        this.getList();
        return null;
    }

    public List<T> getList(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        long expireTimeMillis = System.currentTimeMillis() + unit.toMillis(timeout);

        List<T> results = new ArrayList<>(this.futures.size());
        for (Future<T> future : this.futures) {
            if (future instanceof RunnableFuture<?>) {
                this.logger.debug("Waiting up to {} ms from the start of the execution of the future for it to complete", unit.toMillis(timeout));
                results.add(((RunnableFuture<T>) future).get(timeout, unit));
            } else {
                long remainingTimeMillis = expireTimeMillis - System.currentTimeMillis();
                this.logger.debug("Waiting {} ms for the future to complete", remainingTimeMillis);
                results.add(future.get(remainingTimeMillis, TimeUnit.MILLISECONDS));
            }
        }

        return results;
    }

    @Override
    public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        this.getList(timeout, unit);
        return null;
    }

    @Override
    public boolean isCancelled() {
        for (Future<T> future : this.futures)
            if (!future.isCancelled())
                return false;

        return true;
    }

    @Override
    public boolean isDone() {
        for (Future<T> future : this.futures)
            if (!future.isDone())
                return false;

        return true;
    }

    /**
     * Remove any futures that are done (or cancelled).
     *
     * @param includeCancelled `true` to purge cancelled futures; `false` to purge only completed futures
     */
    public void purge(boolean includeCancelled) {
        List<CompositeFuture<?>> cfutures = new LinkedList<>();
        int removedCancelled = 0;
        int removedDone = 0;

        Iterator<Future<T>> i = this.futures.iterator();
        while (i.hasNext()) {
            Future<T> future = i.next();
            if (future.isCancelled()) {
                if (includeCancelled) {
                    removedCancelled++;
                    i.remove();
                }
            } else if (future.isDone()) {
                removedDone++;
                i.remove();
            } else if (future instanceof CompositeFuture<?>) {
                cfutures.add((CompositeFuture<?>) future);
            }
        }

        this.logger.debug("Purged {} cancelled and {} completed futures", removedCancelled, removedDone);

        for (CompositeFuture<?> cfuture : cfutures)
            cfuture.purge(includeCancelled);
    }

}
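A minimal usage sketch of CompositeFuture follows; the executor, the task bodies, and the timeout are illustrative assumptions and are not part of this change set.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import com.inteligr8.alfresco.asie.util.CompositeFuture;

public class CompositeFutureExample {

    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(2);
        CompositeFuture<String> composite = new CompositeFuture<>();

        // combine several independent futures into one handle
        Future<String> shard1 = executor.submit(() -> "shard-1 fixed");
        Future<String> shard2 = executor.submit(() -> "shard-2 fixed");
        composite.combine(shard1);
        composite.combine(shard2);

        // wait up to 30 seconds overall; results come back in combination order
        System.out.println(composite.getList(30L, TimeUnit.SECONDS));

        // drop completed (and cancelled) futures so the composite can be reused
        composite.purge(true);
        executor.shutdown();
    }
}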
@@ -0,0 +1,43 @@
package com.inteligr8.alfresco.asie.util;

public abstract class ElapsedInterruptableRunnable extends InterruptableRunnable {

    private final Object runLock = new Object();
    private boolean runOnce = true;
    private boolean run = false;
    private Long executionStartTimeMillis;
    private Long executionEndTimeMillis;

    public void setRunOnce(boolean runOnce) {
        this.runOnce = runOnce;
    }

    @Override
    public final void runInterruptable() throws InterruptedException {
        synchronized (this.runLock) {
            if (this.runOnce && this.run)
                return;
            this.run = true;
        }

        this.executionStartTimeMillis = System.currentTimeMillis();
        try {
            this.runElapsed();
        } finally {
            this.executionEndTimeMillis = System.currentTimeMillis();
        }
    }

    protected abstract void runElapsed() throws InterruptedException;

    public Long computeElapsedExecutionMillis() {
        if (this.executionEndTimeMillis != null) {
            return this.executionEndTimeMillis - this.executionStartTimeMillis;
        } else if (this.executionStartTimeMillis != null) {
            return System.currentTimeMillis() - this.executionStartTimeMillis;
        } else {
            return null;
        }
    }

}
@@ -0,0 +1,75 @@
package com.inteligr8.alfresco.asie.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class InterruptableRunnable implements Runnable {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    private final Object threadLock = new Object();
    private boolean run = false;
    private Thread runThread = null;
    private boolean completed = false;
    private boolean interrupted = false;

    @Override
    public final void run() {
        synchronized (this.threadLock) {
            this.run = true;
            this.runThread = Thread.currentThread();
        }

        try {
            this.runInterruptable();
            this.completed = true;
        } catch (InterruptedException ie) {
            this.logger.debug("Runnable interrupted");
            this.interrupted = true;
            this.interrupted(ie);
        } finally {
            this.runThread = null;
        }
    }

    protected abstract void runInterruptable() throws InterruptedException;

    public boolean interrupt() {
        synchronized (this.threadLock) {
            if (this.runThread == null)
                return false;

            this.logger.trace("Runnable interrupting ...");
            this.runThread.interrupt();
        }

        return true;
    }

    protected void interrupted(InterruptedException ie) {
    }

    public boolean isQueued() {
        return !this.run;
    }

    public boolean isInterrupted() {
        return this.interrupted;
    }

    public boolean isCompleted() {
        return this.completed;
    }

    public boolean isFailed() {
        synchronized (this.threadLock) {
            return this.run && !this.completed && !this.interrupted && this.runThread == null;
        }
    }

    public boolean isDone() {
        synchronized (this.threadLock) {
            return this.run && this.runThread == null;
        }
    }

}
@@ -0,0 +1,72 @@
package com.inteligr8.alfresco.asie.util;

import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RunnableFuture<T> implements Future<T> {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    private final ThreadPoolExecutor executor;
    private final WaitableRunnable runnable;

    public RunnableFuture(ThreadPoolExecutor executor, WaitableRunnable runnable) {
        this.executor = executor;
        this.runnable = runnable;
    }

    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        if (this.executor.remove(this.runnable)) {
            this.logger.debug("Cancelled runnable by removing from queue");
            return true;
        } else if (mayInterruptIfRunning && this.runnable.interrupt()) {
            this.logger.debug("Cancelled runnable by interrupting it");
            return true;
        }
        return false;
    }

    /**
     * This method will never time out; it only returns or throws once the
     * runnable is complete or interrupted.
     *
     * @return Always `null`.
     */
    @Override
    public T get() throws InterruptedException {
        this.runnable.waitUntilDone();
        return null;
    }

    /**
     * This method will time out the specified amount of time after the
     * execution commences. It waits indefinitely while the execution is
     * queued; once it is in the executing state, the clock starts. This
     * alters the default behavior of the `Future` interface.
     *
     * @param timeout A positive integer representing a period of time.
     * @param unit The time unit of the `timeout` parameter.
     * @return Always `null`.
     */
    @Override
    public T get(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        this.runnable.waitUntilExecutionElapsed(timeout, unit);
        return null;
    }

    @Override
    public boolean isCancelled() {
        return this.runnable.isInterrupted();
    }

    @Override
    public boolean isDone() {
        return this.runnable.isDone();
    }

}
@@ -0,0 +1,96 @@
package com.inteligr8.alfresco.asie.util;

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class ThrottledThreadPoolExecutor extends ThreadPoolExecutor {

    public ThrottledThreadPoolExecutor(
            int coreThreadPoolSize,
            int maximumThreadPoolSize,
            int maximumQueueSize,
            long keepAliveTime,
            TimeUnit unit,
            String threadNamePrefix) {
        super(coreThreadPoolSize, maximumThreadPoolSize, keepAliveTime, unit,
                new ArrayBlockingQueue<>(maximumQueueSize),
                new ThreadFactoryBuilder()
                        .setNameFormat(threadNamePrefix + "-%d")
                        .build());
    }

    public ThrottledThreadPoolExecutor(
            int coreThreadPoolSize,
            int maximumThreadPoolSize,
            int maximumQueueSize,
            long keepAliveTime,
            TimeUnit unit,
            String threadNamePrefix,
            RejectedExecutionHandler rejectedExecutionHandler) {
        super(coreThreadPoolSize, maximumThreadPoolSize, keepAliveTime, unit,
                new ArrayBlockingQueue<>(maximumQueueSize),
                new ThreadFactoryBuilder()
                        .setNameFormat(threadNamePrefix + "-%d")
                        .build(),
                rejectedExecutionHandler);
    }

    /**
     * @param timeout Negative to block indefinitely until queue space is available
     */
    public <T> RunnableFuture<T> submit(Callable<T> task, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        // note: the callable's result is discarded; get() on the returned future always yields null
        WaitableRunnable wrunnable = new WaitableRunnable() {

            @Override
            protected void runWaitable() throws InterruptedException {
                try {
                    task.call();
                } catch (InterruptedException ie) {
                    throw ie;
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }

        };

        return new RunnableFuture<T>(this, this.submit(wrunnable, timeout, unit));
    }

    /**
     * @param timeout Negative to block indefinitely until queue space is available
     */
    public RunnableFuture<?> submit(Runnable runnable, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        WaitableRunnable wrunnable = new WaitableRunnable() {

            @Override
            protected void runWaitable() {
                runnable.run();
            }

        };

        return new RunnableFuture<Void>(this, this.submit(wrunnable, timeout, unit));
    }

    private WaitableRunnable submit(WaitableRunnable runnable, long throttlingBlockTimeout, TimeUnit throttlingBlockUnit) throws InterruptedException, TimeoutException {
        // if no core threads are running, the queue won't be monitored for runnables
        this.prestartAllCoreThreads();

        if (throttlingBlockTimeout < 0L) {
            this.getQueue().put(runnable);
        } else {
            if (!this.getQueue().offer(runnable, throttlingBlockTimeout, throttlingBlockUnit))
                throw new TimeoutException();
        }

        return runnable;
    }

}
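A minimal sketch of the intended throttled-submit flow, which also exercises the RunnableFuture timeout contract described above; the pool sizing, the task body, and the timeouts are illustrative assumptions.

import java.util.concurrent.TimeUnit;

import com.inteligr8.alfresco.asie.util.RunnableFuture;
import com.inteligr8.alfresco.asie.util.ThrottledThreadPoolExecutor;

public class ThrottledSubmitExample {

    public static void main(String[] args) throws Exception {
        // 2 core / 4 max threads, queue of 8; submissions beyond that block (throttle) the caller
        ThrottledThreadPoolExecutor executor =
                new ThrottledThreadPoolExecutor(2, 4, 8, 60L, TimeUnit.SECONDS, "asie-fix");

        // block up to 5 seconds for queue space; a still-full queue past that throws TimeoutException
        Runnable task = () -> System.out.println("issuing fix to a Solr node");
        RunnableFuture<?> future = executor.submit(task, 5L, TimeUnit.SECONDS);

        // unlike a standard Future, this 30 s budget starts when execution starts, not at submission
        future.get(30L, TimeUnit.SECONDS);
        executor.shutdown();
    }
}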
@@ -0,0 +1,60 @@
package com.inteligr8.alfresco.asie.util;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public abstract class WaitableRunnable extends ElapsedInterruptableRunnable {

    private final Object lock = new Object();
    private boolean done = false;

    @Override
    public final void runElapsed() throws InterruptedException {
        this.done = false;
        try {
            this.runWaitable();
        } finally {
            synchronized (this.lock) {
                this.done = true;
                this.lock.notifyAll();
            }
        }
    }

    protected abstract void runWaitable() throws InterruptedException;

    public void waitUntilDone() throws InterruptedException {
        synchronized (this.lock) {
            if (!this.done && !this.isDone())
                this.lock.wait();
        }
    }

    public void waitUntilDone(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        synchronized (this.lock) {
            if (!this.done && !this.isDone()) {
                long waitTime = unit.toMillis(timeout);
                long startTime = System.currentTimeMillis();
                this.lock.wait(waitTime);
                if (System.currentTimeMillis() - startTime >= waitTime)
                    throw new TimeoutException();
            }
        }
    }

    public void waitUntilExecutionElapsed(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        synchronized (this.lock) {
            if (!this.done && !this.isDone()) {
                while (this.isQueued())
                    Thread.sleep(50L);
                Long elapsedExecutionMillis = this.computeElapsedExecutionMillis();
                // if the execution budget is already spent, do not wait on the lock at all
                long waitTime = unit.toMillis(timeout) - elapsedExecutionMillis;
                if (waitTime <= 0L)
                    throw new TimeoutException();
                long startTime = System.currentTimeMillis();
                this.lock.wait(waitTime);
                if (System.currentTimeMillis() - startTime >= waitTime)
                    throw new TimeoutException();
            }
        }
    }

}
@@ -8,10 +8,8 @@
        <p>Retrieve a reference to the ASIE node that should be used for the backup of the specified ASIE shard registered with ACS.</p>
        <p>The following path parameters are expected:</p>
        <dl>
            <dt>shardSet</dt>
            <dd>A shard method combined with its distinguishing properties;
                methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
                e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
            <dt>shardCore</dt>
            <dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
            <dt>shardId</dt>
            <dd>A number starting at 1</dd>
        </dl>
@@ -34,12 +32,15 @@
    ]]></description>

    <!-- Endpoint Configuration -->
    <url>/inteligr8/asie/shard/{shardSet}/{shardId}/backup</url>
    <url>/inteligr8/asie/shard/{shardCore}/{shardId}/backup</url>
    <format default="json">any</format>

    <!-- Security -->
    <authentication>none</authentication>

    <!-- Transaction -->
    <transaction>required</transaction>

    <!-- Functionality -->
    <cache>
        <never>false</never>
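For reference, a hedged sketch of calling the renamed backup endpoint; the host, port, and the /alfresco/s service prefix are assumptions about a typical deployment and are not part of this change. The lead endpoint further below follows the same pattern with a /lead suffix.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ShardBackupNodeExample {

    public static void main(String[] args) throws Exception {
        // shardCore "alfresco", shardId 1; the descriptor declares no authentication
        URI uri = URI.create("http://localhost:8080/alfresco/s/inteligr8/asie/shard/alfresco/1/backup");

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(HttpRequest.newBuilder(uri).GET().build(), HttpResponse.BodyHandlers.ofString());

        // expected to return a JSON reference to the ASIE node chosen for the backup
        System.out.println(response.statusCode() + ": " + response.body());
    }
}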
@@ -0,0 +1,46 @@
<webscript xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:noNamespaceSchemaLocation="https://bitbucket.org/!api/2.0/snippets/inteligr8/AzMgbp/80fdd26a6b3769a63cdc6b54bf1f39e378545cf7/files/snippet.txt">

    <!-- Naming & Organization -->
    <shortname>Fix ASIE Indexes</shortname>
    <family>Inteligr8 ASIE</family>
    <description><![CDATA[
        <p>Issue a 'fix' command to the ASIE indexes.
        This call will attempt to fix all Solr nodes.
        The fix operation is asynchronous and could fail on any Solr node without notification.</p>
        <p>The following response body should be expected in most cases (202 and 500 status codes):</p>
        <pre>
        {
            "scheduled": [
                "solrHostAsync:8983/solr",
                ...
            ],
            "error": {
                "solrHostThatFailed:8983/solr": {
                    "message": "string"
                },
                ...
            }
        }
        </pre>
        <p>The following status codes should be expected:</p>
        <dl>
            <dt>202</dt>
            <dd>Accepted</dd>
        </dl>
    ]]></description>

    <!-- Endpoint Configuration -->
    <url>/inteligr8/asie/acs/fix</url>
    <format default="json">any</format>

    <!-- Security -->
    <authentication>user</authentication>

    <!-- Functionality -->
    <cache>
        <never>false</never>
        <public>false</public>
    </cache>

</webscript>
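A similar sketch for the new fix endpoint; the HTTP method (POST), host, port, service prefix, and basic-auth credentials are assumptions, since the descriptor above does not show the bound verb.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class FixIndexesExample {

    public static void main(String[] args) throws Exception {
        // the endpoint requires an authenticated user; basic auth is used here for brevity
        String auth = "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes());

        // assumes the webscript is bound to POST and mounted under the usual /alfresco/s prefix
        HttpRequest request = HttpRequest.newBuilder(
                        URI.create("http://localhost:8080/alfresco/s/inteligr8/asie/acs/fix"))
                .header("Authorization", auth)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // a 202 with the 'scheduled'/'error' body described above is the expected outcome
        System.out.println(response.statusCode() + ": " + response.body());
    }
}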
@@ -8,10 +8,8 @@
        <p>Retrieve a reference to the most current/up-to-date ASIE node for the specified ASIE shard registered with ACS.</p>
        <p>The following path parameters are expected:</p>
        <dl>
            <dt>shardSet</dt>
            <dd>A shard method combined with its distinguishing properties;
                methods: MOD_ACL_ID, ACL_ID, DB_ID, DB_ID_RANGE, DATE, PROPERTY, EXPLICIT_ID;
                e.g. PROPERTY;key:cm:created;regex:^d{4} or DB_ID</dd>
            <dt>shardCore</dt>
            <dd>A core name (prefix) for the ASIE shard (e.g. alfresco)</dd>
            <dt>shardId</dt>
            <dd>A number starting at 1</dd>
        </dl>
@@ -30,12 +28,15 @@
    ]]></description>

    <!-- Endpoint Configuration -->
    <url>/inteligr8/asie/shard/{shardSet}/{shardId}/lead</url>
    <url>/inteligr8/asie/shard/{shardCore}/{shardId}/lead</url>
    <format default="json">any</format>

    <!-- Security -->
    <authentication>none</authentication>

    <!-- Transaction -->
    <transaction>required</transaction>

    <!-- Functionality -->
    <cache>
        <never>false</never>
@@ -29,7 +29,10 @@
    <url>/inteligr8/asie/node/{nodeEndpoint}</url>

    <!-- Security -->
    <authentication>admin</authentication>
    <authentication>user</authentication>

    <!-- Transaction -->
    <transaction>required</transaction>

    <!-- Functionality -->
    <cache>
@@ -58,7 +58,7 @@
    <format default="json">any</format>

    <!-- Security -->
    <authentication>admin</authentication>
    <authentication>user</authentication>

    <!-- Functionality -->
    <cache>
@@ -31,7 +31,7 @@
    <url>/inteligr8/asie/node/{nodeEndpoint}?coreName={coreName?}&shardRange={shardRange?}&template={template?}&shardCount={shardCount?}&nodeId={nodeId?}&nodeCount={nodeCount?}</url>

    <!-- Security -->
    <authentication>admin</authentication>
    <authentication>user</authentication>

    <!-- Functionality -->
    <cache>
@@ -32,7 +32,10 @@
    <url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>

    <!-- Security -->
    <authentication>admin</authentication>
    <authentication>user</authentication>

    <!-- Transaction -->
    <transaction>required</transaction>

    <!-- Functionality -->
    <cache>
@@ -30,7 +30,7 @@
    <url>/inteligr8/asie/node/{nodeEndpoint}/shard/{shardCore}/{shardId}</url>

    <!-- Security -->
    <authentication>admin</authentication>
    <authentication>user</authentication>

    <!-- Functionality -->
    <cache>
Some files were not shown because too many files have changed in this diff.