Compare commits

...

52 Commits

Author SHA1 Message Date
Jared Ottley
d9c56bbc79 [MNT-25404] [LFG] Query Performance - High performance cost in retrieving nodes/node properties for large result sets
- Bulkified queries on the preload for a query.
- Added additional configuration properties around the code changes
- New Properties are (with the defaults):
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
2025-11-04 21:49:33 -07:00
alfresco-build
a16473100d [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-03 12:46:19 +00:00
alfresco-build
6b2fafac45 [maven-release-plugin][skip ci] prepare release 25.3.0.67 2025-11-03 12:46:17 +00:00
Damian Ujma
3508e17907 ACS-10456 Bump Netty and Camel (#3632) 2025-11-03 12:03:17 +01:00
alfresco-build
b9d0773989 [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-02 00:10:06 +00:00
alfresco-build
df4a70b61e [maven-release-plugin][skip ci] prepare release 25.3.0.66 2025-11-02 00:10:04 +00:00
Alfresco CI User
331464f106 [force] Force release for 2025-11-02. 2025-11-02 00:05:03 +00:00
alfresco-build
d21fdb09b5 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 14:32:25 +00:00
alfresco-build
38a4da7413 [maven-release-plugin][skip ci] prepare release 25.3.0.65 2025-10-30 14:32:23 +00:00
Belal Ansari
920285b209 ACS-10404 bump ATS (#3627) 2025-10-30 18:38:51 +05:30
alfresco-build
4ab8e36170 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 09:16:50 +00:00
alfresco-build
9860cf63ae [maven-release-plugin][skip ci] prepare release 25.3.0.64 2025-10-30 09:16:47 +00:00
Somnath-Deshmukh
a49e0b2ae3 MNT-25422 Bulleted list, numbered list, and underline are not working properly when adding comment in the file. (#3624) 2025-10-30 13:26:32 +05:30
alfresco-build
e28b7b35da [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-28 11:24:54 +00:00
alfresco-build
6baafc0e88 [maven-release-plugin][skip ci] prepare release 25.3.0.63 2025-10-28 11:24:51 +00:00
Somnath-Deshmukh
b1cf78ce7c Fix/mnt 25359 Prevent XSS attack during posting a comment keeping the editor styles intact (#3623)
Fix for MNT-25359 Prevent XSS attack during posting a comment keeping the editor styles intact.
2025-10-28 15:35:34 +05:30
alfresco-build
a2f9857442 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-26 00:10:29 +00:00
alfresco-build
8c69432052 [maven-release-plugin][skip ci] prepare release 25.3.0.62 2025-10-26 00:10:28 +00:00
Alfresco CI User
124f87ee21 [force] Force release for 2025-10-26. 2025-10-26 00:05:04 +00:00
Swarnajit3004
3cd3b2c2d6 [ACS-10547] Added code to handle PATCH request (#3622) 2025-10-24 10:18:34 +05:30
alfresco-build
14da8d2002 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-22 09:51:01 +00:00
alfresco-build
6a4bbb021c [maven-release-plugin][skip ci] prepare release 25.3.0.61 2025-10-22 09:50:59 +00:00
Debjit Chattopadhyay
42d70b17c7 Revert "MNT-24776 adding if-else conditionals to avoid null values"
Revert "MNT-24776 adding if-else conditionals to avoid null values"
2025-10-22 14:33:09 +05:30
Debjit Chattopadhyay
c7eba0ddc8 Revert "MNT-24776 adding if-else conditionals to avoid null values"
This reverts commit be02be5a8b.
2025-10-22 13:08:21 +05:30
alfresco-build
266094c0e1 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-19 00:08:29 +00:00
alfresco-build
e442b4acf0 [maven-release-plugin][skip ci] prepare release 25.3.0.60 2025-10-19 00:08:28 +00:00
Alfresco CI User
fd1028a685 [force] Force release for 2025-10-19. 2025-10-19 00:05:12 +00:00
alfresco-build
0a7e275a9c [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-17 06:19:01 +00:00
alfresco-build
d1bbba7286 [maven-release-plugin][skip ci] prepare release 25.3.0.59 2025-10-17 06:18:59 +00:00
Somnath-Deshmukh
e1baddebee Fix/mnt 25359 (#3613)
Prevent XSS attack during posting a comment
2025-10-17 10:58:02 +05:30
alfresco-build
3263dcaf2f [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-12 00:09:47 +00:00
alfresco-build
8926f7f9a7 [maven-release-plugin][skip ci] prepare release 25.3.0.58 2025-10-12 00:09:45 +00:00
Alfresco CI User
764a1b656c [force] Force release for 2025-10-12. 2025-10-12 00:04:45 +00:00
alfresco-build
cf265f2dea [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-10 08:32:39 +00:00
alfresco-build
fd0d5204eb [maven-release-plugin][skip ci] prepare release 25.3.0.57 2025-10-10 08:32:36 +00:00
Piotr Żurek
f9b8a4b42d Fix missing import 2025-10-10 09:44:45 +02:00
Piotr Żurek
fcdc1438e7 Fix formatting 2025-10-10 09:26:24 +02:00
Axel Faust
7cd1416561 Governance Services: LinkedHashSet for stable reader/writer authorities set order + hash code for in-place group names (#2664) 2025-10-10 08:39:23 +02:00
alfresco-build
f197757f94 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-09 09:23:06 +00:00
alfresco-build
af995f1087 [maven-release-plugin][skip ci] prepare release 25.3.0.56 2025-10-09 09:23:04 +00:00
Sayan Bhattacharya
2cfcd3dfa7 ACS-9990 changed extract metadata overwrite policy to EAGER (#3562) 2025-10-09 13:18:25 +05:30
alfresco-build
89e09b0162 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-08 17:09:34 +00:00
alfresco-build
495808b172 [maven-release-plugin][skip ci] prepare release 25.3.0.55 2025-10-08 17:09:31 +00:00
cezary-witkowski
57060af84b [ACS-10454] Bump AOS to fix "Edit in Microsoft Office" error (#3607) 2025-10-08 17:58:19 +02:00
alfresco-build
60261aafd1 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-07 07:55:25 +00:00
alfresco-build
8dad225394 [maven-release-plugin][skip ci] prepare release 25.3.0.54 2025-10-07 07:55:23 +00:00
Debjit Chattopadhyay
5cc21c55e7 MNT-24776 adding if-else conditionals to avoid null values
MNT-24776 adding if-else conditionals to avoid null values
2025-10-07 12:38:14 +05:30
alfresco-build
c71aaf7537 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-06 16:27:33 +00:00
alfresco-build
b7d16ac915 [maven-release-plugin][skip ci] prepare release 25.3.0.53 2025-10-06 16:27:31 +00:00
cezary-witkowski
1a436b06e4 [ACS-10454] Bump AOS to fix "Edit in Microsoft Office" error (#3603) 2025-10-06 17:17:28 +02:00
Debjit Chattopadhyay
be02be5a8b MNT-24776 adding if-else conditionals to avoid null values 2025-10-06 19:01:58 +05:30
alfresco-build
a674e574c5 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-05 00:10:04 +00:00
61 changed files with 1454 additions and 89 deletions

View File

@@ -104,7 +104,7 @@ jobs:
mkdir temp-dir-for-sast
bash ./scripts/ci/remove-sast-exclusions.sh ./packaging/war/target/alfresco.war temp-dir-for-sast/reduced.war
- name: "Run SAST Scan"
uses: veracode/Veracode-pipeline-scan-action@v1.0.16
uses: veracode/Veracode-pipeline-scan-action@v1.0.20
with:
vid: ${{ secrets.VERACODE_API_ID }}
vkey: ${{ secrets.VERACODE_API_KEY }}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -31,6 +31,7 @@ import static org.alfresco.service.cmr.security.PermissionService.GROUP_PREFIX;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -247,7 +248,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
*/
private Set<String> getAuthorities(String group)
{
Set<String> result = new HashSet<>();
Set<String> result = new LinkedHashSet<>();
result.addAll(authorityService.getContainedAuthorities(null, group, true));
return result;
}

View File

@@ -33,6 +33,7 @@ import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -325,8 +326,8 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
return aclReaders;
}
HashSet<String> assigned = new HashSet<>();
HashSet<String> readers = new HashSet<>();
Set<String> assigned = new LinkedHashSet<>();
Set<String> readers = new LinkedHashSet<>();
for (AccessControlEntry ace : acl.getEntries())
{
@@ -412,8 +413,8 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
return aclWriters;
}
HashSet<String> assigned = new HashSet<>();
HashSet<String> readers = new HashSet<>();
Set<String> assigned = new LinkedHashSet<>();
Set<String> readers = new LinkedHashSet<>();
for (AccessControlEntry ace : acl.getEntries())
{
@@ -485,7 +486,7 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
Set<String> writers = getWriters(aclId);
// add the current owner to the list of extended writers
Set<String> modifiedWrtiers = new HashSet<>(writers);
Set<String> modifiedWrtiers = new LinkedHashSet<>(writers);
String owner = ownableService.getOwner(nodeRef);
if (StringUtils.isNotBlank(owner) &&
!owner.equals(OwnableService.NO_OWNER) &&

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
</project>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<organization>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -675,6 +675,11 @@ public class RestWrapper extends DSLWrapper<RestWrapper>
{
returnedResponse = onRequest().get(restRequest.getPath(), restRequest.getPathParams()).andReturn();
}
else if (HttpMethod.PATCH.equals(httpMethod))
{
returnedResponse = onRequest().body(restRequest.getBody())
.patch(restRequest.getPath(), restRequest.getPathParams()).andReturn();
}
else
{
returnedResponse = onRequest().get(restRequest.getPath(), restRequest.getPathParams()).andReturn();

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

14
pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -51,8 +51,8 @@
<dependency.alfresco-server-root.version>7.0.2</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.2.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.2.3-A.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.3-A.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.1</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>1.0.11</dependency.acs-event-model.version>
@@ -86,8 +86,8 @@
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>5.4.0</dependency.poi.version>
<dependency.jboss.logging.version>3.5.0.Final</dependency.jboss.logging.version>
<dependency.camel.version>4.11.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.118.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.camel.version>4.15.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.127.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.6</dependency.activemq.version>
<dependency.apache-compress.version>1.27.1</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.2</dependency.awaitility.version>
@@ -115,7 +115,7 @@
<dependency.jakarta-json-path.version>2.9.0</dependency.jakarta-json-path.version>
<dependency.json-smart.version>2.5.2</dependency.json-smart.version>
<alfresco.googledrive.version>4.1.0</alfresco.googledrive.version>
<alfresco.aos-module.version>3.3.0</alfresco.aos-module.version>
<alfresco.aos-module.version>3.4.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>25.2.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
@@ -154,7 +154,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>25.3.0.52</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -31,7 +31,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.json.simple.JSONObject;
import org.owasp.html.HtmlPolicyBuilder;
import org.owasp.html.PolicyFactory;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptRequest;
@@ -67,6 +70,29 @@ public class CommentsPost extends AbstractCommentsWebScript
// get json object from request
JSONObject json = parseJSON(req);
// Validating and Sanitizing comment content to prevent XSS
String commentContent = getOrNull(json, "content");
if (StringUtils.isBlank(commentContent))
{
throw new IllegalArgumentException("Comment content must not be empty");
}
else
{
// Allowed HTML elements and attributes in comment content, e.g. text formatting, lists, and structure & styling
String[] allowedElements = {"b", "i", "u", "strong", "em", "ul", "ol", "li", "p", "br", "span", "div"};
PolicyFactory policy = new HtmlPolicyBuilder()
.allowElements(allowedElements)
.allowAttributes("style")
.onElements("span", "div", "p", "ul")
.allowStyling()
.allowStandardUrlProtocols()
.toFactory();
String safeContent = policy.sanitize(commentContent);
json.replace("content", safeContent);
}
/* MNT-10231, MNT-9771 fix */
this.behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_AUDITABLE);

View File

@@ -51,6 +51,14 @@ function main()
}
}
var contentChanged = false;
if (itemKind === "node") {
contentChanged = metadataExtractAction.isContentChanged(itemId,repoFormData);
}
if(logger.isLoggingEnabled() && contentChanged) {
logger.log("Content has been changed");
}
var persistedObject = null;
try
{
@@ -83,9 +91,50 @@ function main()
return;
}
if (itemKind === "node") {
checkAndExtractNodeMetadata(persistedObject, itemId, contentChanged);
}
model.persistedObject = persistedObject.toString();
model.message = "Successfully persisted form for item [" + itemKind + "]" + itemId;
}
// Resolves the persisted object to a node and, only when the content actually
// changed, triggers metadata extraction on it. Missing nodes and unchanged
// content are logged (when logging is enabled) and otherwise ignored.
// persistedObject: result of saveForm (NodeRef when itemKind === "node")
// itemId: the form item id (noderef string or bare UUID)
// isContentChanged: flag computed earlier via metadataExtractAction.isContentChanged
function checkAndExtractNodeMetadata(persistedObject, itemId, isContentChanged) {
var nodeRefStr = toNodeRefString(persistedObject, itemId);
var node = search.findNode(nodeRefStr);
if (node == null) {
// Node could not be resolved - log and skip extraction rather than fail the form persist
if (logger.isLoggingEnabled()) {
logger.log("Node not found: " + nodeRefStr);
}
} else if(isContentChanged) {
extractMetadata(node, isContentChanged);
} else {
// Content unchanged: extraction is deliberately skipped to avoid overwriting properties
if (logger.isLoggingEnabled()) {
logger.log("Content not changed, skipping metadata extraction for node: " + nodeRefStr);
}
}
}
// Creates the metadata-extract action (parameterised with the content-changed
// flag) and executes it against the given node.
// file: the ScriptNode to extract metadata for
// isContentChanged: passed through to metadataExtractAction.create so the
//   executer can pick the overwrite policy (EAGER vs PRAGMATIC)
function extractMetadata(file, isContentChanged) {
var emAction = metadataExtractAction.create(isContentChanged);
if (emAction) {
// readOnly=false, newTransaction=false
emAction.execute(file, false, false);
}
}
// Normalises the (persistedObject, itemId) pair into a NodeRef string.
// Resolution order: NodeRef returned by saveForm, then a full noderef passed
// by the client, then a bare UUID assumed to live in workspace://SpacesStore.
function toNodeRefString(persistedObject, itemId) {
// Prefer the NodeRef returned by saveForm (when kind=node).
if (persistedObject instanceof Packages.org.alfresco.service.cmr.repository.NodeRef) {
return persistedObject.toString();
}
// If the client passed a full noderef, keep it.
if (itemId && itemId.indexOf("://") !== -1) {
return itemId;
}
// Otherwise assume SpacesStore UUID.
return "workspace://SpacesStore/" + itemId;
}
main();

View File

@@ -2,7 +2,7 @@ function extractMetadata(file)
{
// Extract metadata - via repository action for now.
// This should use the MetadataExtracter API to fetch properties, allowing for possible failures.
var emAction = actions.create("extract-metadata");
var emAction = metadataExtractAction.create(true);
if (emAction != null)
{
// Call using readOnly = false, newTransaction = false

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.52</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -0,0 +1,67 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.action.evaluator;
import java.util.List;
import org.alfresco.service.cmr.action.ActionCondition;
import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.repository.NodeRef;
/**
 * Action condition evaluator that exists solely to carry a pre-computed
 * "content changed" flag into a scripted rule action. It never filters
 * anything itself; the flag parameter is read downstream by the metadata
 * extraction executer (see its {@code PARAM_IS_CONTENT_CHANGED} lookup).
 * <br>
 * Usage in {@link org.alfresco.repo.jscript.MetaDataExtractAction#create(boolean)}
 *
 * @author Sayan Bhattacharya
 */
public class CompareContentConditionEvaluator extends ActionConditionEvaluatorAbstractBase
{
    /** Registered name of this condition evaluator. */
    public static final String NAME = "compare-content";

    /** Parameter key holding the boolean "has the content changed" flag. */
    public static final String PARAM_IS_CONTENT_CHANGED = "isContentChanged";

    /**
     * Always passes: this condition is a parameter carrier, not a real filter.
     *
     * @see ActionConditionEvaluatorAbstractBase#evaluateImpl(ActionCondition, NodeRef)
     */
    @Override
    public boolean evaluateImpl(ActionCondition ruleCondition, NodeRef actionedUponNodeRef)
    {
        return true;
    }

    /**
     * No declared parameters; the flag is set ad hoc by the caller.
     *
     * @see org.alfresco.repo.action.ParameterizedItemAbstractBase#addParameterDefinitions(List)
     */
    @Override
    protected void addParameterDefinitions(List<ParameterDefinition> paramList)
    {
        // Intentionally empty - no parameter definitions to register
    }
}

View File

@@ -51,12 +51,14 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.evaluator.CompareContentConditionEvaluator;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.AsynchronousExtractor;
import org.alfresco.repo.content.metadata.MetadataExtracter;
@@ -403,6 +405,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
((AbstractMappingMetadataExtracter) extracter).setEnableStringTagging(enableStringTagging);
}
MetadataExtracter.OverwritePolicy overwritePolicy = determineOverwritePolicy(ruleAction);
// Get all the node's properties
Map<QName, Serializable> nodeProperties = nodeService.getProperties(actionedUponNodeRef);
@@ -415,7 +418,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
modifiedProperties = extracter.extract(
actionedUponNodeRef,
reader,
/* OverwritePolicy.PRAGMATIC, */
overwritePolicy,
nodeProperties);
}
catch (Throwable e)
@@ -456,6 +459,21 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
stringTaggingSeparators);
}
/**
 * Chooses the metadata overwrite policy for this action execution.
 * <p>
 * If the action carries a {@code compare-content} condition whose
 * {@code isContentChanged} parameter is {@code Boolean.TRUE}, the EAGER policy
 * is returned (content changed - overwrite existing properties); in every
 * other case (no conditions, no matching condition, flag absent/false) the
 * historical PRAGMATIC policy is kept.
 *
 * @param ruleAction the action whose conditions are inspected (never null)
 * @return EAGER when the content-changed flag is set, PRAGMATIC otherwise
 */
private MetadataExtracter.OverwritePolicy determineOverwritePolicy(Action ruleAction)
{
// getActionConditions() may return null - ofNullable keeps the chain safe
return Optional.ofNullable(ruleAction.getActionConditions())
.flatMap(conditions -> conditions.stream()
.filter(e -> CompareContentConditionEvaluator.NAME.equals(e.getActionConditionDefinitionName()))
.findAny()
.map(e -> {
Serializable contentChanged = e.getParameterValue(CompareContentConditionEvaluator.PARAM_IS_CONTENT_CHANGED);
// Boolean.TRUE.equals is null-safe: an absent parameter means "not changed"
return Boolean.TRUE.equals(contentChanged)
? MetadataExtracter.OverwritePolicy.EAGER
: MetadataExtracter.OverwritePolicy.PRAGMATIC;
}))
.orElse(MetadataExtracter.OverwritePolicy.PRAGMATIC);
}
public static void addExtractedMetadataToNode(NodeRef actionedUponNodeRef, Map<QName, Serializable> nodeProperties,
Map<QName, Serializable> modifiedProperties,
NodeService nodeService, DictionaryService dictionaryService,

View File

@@ -27,6 +27,13 @@ package org.alfresco.repo.cache.lookup;
import java.io.Serializable;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.ParameterCheck;
@@ -73,6 +80,19 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
VK1 getValueKey(V1 value);
/**
* Resolve the given values into unique value keys that can be used to find an entity's ID. A return value should be small and efficient; don't return a value if this is not possible.
* <p/>
* Implementations will often return values themselves, provided that the values are both serializable and have good <code>equals</code> and <code>hashCode</code>.
* <p/>
Where no adequate key can be generated for the value, then it should not be returned. In this case, the {@link #findByValue(Object) findByValue} method might not even do a search and just return <tt>null</tt> or nothing itself, i.e. if it is difficult to look the value up in storage then it is probably difficult to generate a cache key from it, too. In this scenario, the cache will be purely for key-based lookups
*
* @param values
* full values being keyed (never <tt>null</tt>)
* @return Returns the business keys representing the entities
*/
List<VK1> getValueKeys(List<V1> values);
/**
* Find an entity for a given key.
*
@@ -82,6 +102,15 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
Pair<K1, V1> findByKey(K1 key);
/**
* Find entities for a list of given keys.
*
* @param keys
* the keys (IDs) used to identify the entity (never <tt>null</tt>)
* @return Returns a list of entities or <tt>null</tt> if no entities exist for the IDs
*/
List<Pair<K1, V1>> findByKeys(List<K1> keys);
/**
* Find an entity using the given value key. The <code>equals</code> and <code>hashCode</code> methods of the value object should respect case-sensitivity in the same way that this lookup treats case-sensitivity i.e. if the <code>equals</code> method is <b>case-sensitive</b> then this method should look the entity up using a <b>case-sensitive</b> search.
* <p/>
@@ -177,6 +206,16 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return null;
}
/**
 * This implementation never derives value keys; it matches
 * {@link #findByValue(Object)} returning nothing, so value-based cache entries
 * are simply not maintained by this adaptor.
 *
 * @param values the full values (ignored)
 * @return always an immutable empty list
 */
public List<VK2> getValueKeys(List<V2> values)
{
return Collections.emptyList();
}
/**
* Disallows the operation.
*
@@ -343,6 +382,98 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return entityPair;
}
/**
 * Find the entities associated with the given keys, answering from the cache
 * where possible and delegating the remaining misses to the
 * {@link EntityLookupCallbackDAO#findByKeys(List) entity callback} in one bulk call.
 * <p/>
 * It is up to the client code to decide if a returned empty list indicates a
 * concurrency violation or not; the former would normally result in a
 * concurrency-related exception such as {@link ConcurrencyFailureException}.
 *
 * @param keys
 *            The entity keys, which may be valid or invalid (<tt>null</tt> not allowed);
 *            <tt>null</tt> elements are silently skipped
 * @return Returns a list of key-value pairs, or an empty list if no keys reference any entities
 * @throws IllegalArgumentException if the key list itself is <tt>null</tt> or empty
 */
@SuppressWarnings("unchecked")
public List<Pair<K, V>> getByKeys(List<K> keys)
{
    if (keys == null || keys.isEmpty())
    {
        throw new IllegalArgumentException("An entity lookup key list may not be null or empty");
    }
    // Create a defensive copy and remove any nulls for safety
    List<K> filteredKeys = new ArrayList<>(keys.size());
    for (K k : keys)
    {
        if (k != null)
        {
            filteredKeys.add(k);
        }
    }
    if (filteredKeys.isEmpty())
    {
        // Every supplied key was null - nothing can be resolved
        return new ArrayList<>(0);
    }
    // Handle missing cache
    if (cache == null)
    {
        return entityLookup.findByKeys(filteredKeys);
    }
    List<Pair<K, V>> results = new ArrayList<>(filteredKeys.size());
    Map<K, CacheRegionKey> keysToResolve = new HashMap<>();
    for (K key : filteredKeys)
    {
        CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
        // Look in the cache
        V value = (V) cache.get(keyCacheKey);
        if (value == null)
        {
            // Cache miss - need to resolve this key via the DAO
            keysToResolve.put(key, keyCacheKey);
        }
        else if (value.equals(VALUE_NOT_FOUND))
        {
            // Negative cache hit: we checked before and the entity does not exist
            continue; // not costly...making it clear that we are moving to the next key
        }
        else if (value.equals(VALUE_NULL))
        {
            results.add(new Pair<K, V>(key, null));
        }
        else
        {
            results.add(new Pair<K, V>(key, value));
        }
    }
    // Resolve any missing keys. Guarding on non-empty avoids a pointless (and for
    // some SQL-backed DAOs, invalid empty-IN-clause) bulk query when every key was
    // answered from the cache - the previous version called findByKeys unconditionally.
    if (!keysToResolve.isEmpty())
    {
        List<Pair<K, V>> entityPairs = entityLookup.findByKeys(new ArrayList<>(keysToResolve.keySet()));
        if (entityPairs != null)
        {
            for (Pair<K, V> entityPair : entityPairs)
            {
                V value = entityPair.getSecond();
                // Get the value key
                VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
                // Check if the value has a good key
                if (valueKey != null)
                {
                    CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, valueKey);
                    // The key is good, so we can cache the value
                    cache.put(valueCacheKey, entityPair.getFirst());
                }
                cache.put(
                        new CacheRegionKey(cacheRegion, entityPair.getFirst()),
                        (value == null ? VALUE_NULL : value));
                results.add(entityPair);
            }
        }
        // NOTE(review): keys the DAO does not return are NOT negatively cached here,
        // unlike getByKey which stores VALUE_NOT_FOUND - repeated lookups of missing
        // keys will hit the DAO every time. Confirm whether that is intentional.
    }
    // Done
    return results;
}
/**
* Find the entity associated with the given value. The {@link EntityLookupCallbackDAO#findByValue(Object) entity callback} will be used if no entry exists in the cache.
* <p/>
@@ -689,6 +820,22 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Done
}
/**
 * Cache-only operation: invalidate the value-side cache entries for the keys of
 * the given map, leaving the key entries in place since they are about to be
 * refreshed with the new values. The backing store is not touched.
 *
 * @param keyValues map of entity keys to their (new) values
 */
public void setValues(Map<K, V> keyValues)
{
    if (cache == null)
    {
        // No cache configured - nothing to invalidate
        return;
    }
    // Bidirectional removal drops the old value entries; removeKey=false keeps
    // the key entries, which will simply be overwritten by the update.
    List<K> affectedKeys = new ArrayList<>(keyValues.keySet());
    removeByKeys(affectedKeys, false);
}
/**
* Delete the entity associated with the given key. The {@link EntityLookupCallbackDAO#deleteByKey(Serializable)} callback will be used if necessary.
* <p/>
@@ -751,6 +898,20 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
removeByKey(key, true);
}
/**
 * Cache-only operation: remove all cache entries (both key-side and value-side)
 * associated with the given keys. A no-op when no cache is configured.
 *
 * @param keys
 *            the keys whose cache entries must be invalidated
 */
public void removeByKeys(Collection<K> keys)
{
    if (cache != null)
    {
        removeByKeys(keys, true);
    }
}
/**
* Cache-only operation: Remove all cache values associated with the given key.
*
@@ -778,6 +939,46 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
}
/**
 * Cache-only operation: remove all cache values associated with the given keys.
 *
 * @param keys
 *            the keys whose entries are to be invalidated
 * @param removeKey
 *            <tt>true</tt> to remove the given keys' entries as well; <tt>false</tt>
 *            to remove only the value-side (reverse-lookup) entries
 */
@SuppressWarnings("unchecked")
private void removeByKeys(Collection<K> keys, boolean removeKey)
{
    List<V> values = new ArrayList<>();
    for (K key : keys)
    {
        CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
        V value = (V) cache.get(keyCacheKey);
        // Only real values can be reverse-mapped: VALUE_NOT_FOUND and VALUE_NULL are
        // internal sentinels and must not be handed to the entity lookup callback
        if (value != null && !value.equals(VALUE_NOT_FOUND) && !value.equals(VALUE_NULL))
        {
            values.add(value);
        }
        if (removeKey)
        {
            cache.remove(keyCacheKey);
        }
    }
    if (!values.isEmpty())
    {
        // Resolve the value keys in bulk and remove their reverse-lookup entries
        List<VK> valueKeys = entityLookup.getValueKeys(values);
        if (valueKeys != null && !valueKeys.isEmpty())
        {
            for (VK vk : valueKeys)
            {
                CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, vk);
                cache.remove(valueCacheKey);
            }
        }
    }
}
/**
* Cache-only operation: Remove all cache values associated with the given value
*

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -102,6 +102,7 @@ import org.alfresco.service.namespace.QName;
* @author Jesper Steen Møller
* @author Derek Hulley
*/
@SuppressWarnings("PMD.CyclomaticComplexity")
@AlfrescoPublicApi
abstract public class AbstractMappingMetadataExtracter implements MetadataExtracter, MetadataEmbedder, BeanNameAware, ApplicationContextAware
{
@@ -1118,6 +1119,15 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
return extract(nodeRef, reader, overwritePolicy, destination, mapping);
}
/**
 * {@inheritDoc}
 * <p>
 * Delegates to the mapping-aware overload using this extracter's configured default mapping.
 */
@Override
public Map<QName, Serializable> extract(NodeRef nodeRef, ContentReader reader, OverwritePolicy overwritePolicy, Map<QName, Serializable> destination)
{
    return extract(nodeRef, reader, overwritePolicy, destination, mapping);
}
/**
* {@inheritDoc}
*/
@@ -1154,7 +1164,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
// Check that the content has some meat
if (reader.getSize() > 0 && reader.exists())
{
rawMetadata = extractRaw(nodeRef, reader, getLimits(reader.getMimetype()));
rawMetadata = extractRaw(nodeRef, reader, getLimits(reader.getMimetype()), overwritePolicy);
}
else
{
@@ -2002,7 +2012,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
}
/**
* Exception wrapper to handle exceeded limits imposed by {@link MetadataExtracterLimits} {@link AbstractMappingMetadataExtracter#extractRaw(NodeRef, ContentReader, MetadataExtracterLimits)}
* Exception wrapper to handle exceeded limits imposed by {@link MetadataExtracterLimits} {@link AbstractMappingMetadataExtracter#extractRaw(NodeRef, ContentReader, MetadataExtracterLimits,OverwritePolicy)}
*/
private class LimitExceededException extends Exception
{
@@ -2032,7 +2042,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
* All exception conditions can be handled.
*/
private Map<String, Serializable> extractRaw(NodeRef nodeRef,
ContentReader reader, MetadataExtracterLimits limits) throws Throwable
ContentReader reader, MetadataExtracterLimits limits, OverwritePolicy overwritePolicy) throws Throwable
{
if (reader.getSize() > limits.getMaxDocumentSizeMB() * MEGABYTE_SIZE)
{
@@ -2059,6 +2069,12 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
}
}
return extractRawInThread(nodeRef, reader, limits, overwritePolicy);
}
/**
 * Overload of the raw extraction that additionally receives the {@link OverwritePolicy}
 * in effect.
 * <p>
 * The default implementation ignores the policy and delegates to the original
 * three-argument form so existing subclasses keep their behaviour; subclasses that
 * need the policy (e.g. asynchronous extractors) should override this method.
 */
protected Map<String, Serializable> extractRawInThread(NodeRef nodeRef, ContentReader reader, MetadataExtracterLimits limits, OverwritePolicy policy)
throws Throwable
{
    return extractRawInThread(nodeRef, reader, limits);
}

View File

@@ -93,6 +93,9 @@ public class AsynchronousExtractor extends AbstractMappingMetadataExtracter
private static final String METADATA = "metadata";
private static final Map<String, Serializable> EMPTY_METADATA = Collections.emptyMap();
private static final OverwritePolicy DEFAULT_OVERWRITE_POLICY = OverwritePolicy.PRAGMATIC;
private OverwritePolicy extractOverwritePolicy = DEFAULT_OVERWRITE_POLICY;
private final ObjectMapper jsonObjectMapper = new ObjectMapper();
private NodeService nodeService;
@@ -260,9 +263,9 @@ public class AsynchronousExtractor extends AbstractMappingMetadataExtracter
}
@Override
protected Map<String, Serializable> extractRawInThread(NodeRef nodeRef, ContentReader reader, MetadataExtracterLimits limits)
throws Throwable
protected Map<String, Serializable> extractRawInThread(NodeRef nodeRef, ContentReader reader, MetadataExtracterLimits limits, OverwritePolicy overwritePolicy) throws Throwable
{
this.extractOverwritePolicy = overwritePolicy != null ? overwritePolicy : DEFAULT_OVERWRITE_POLICY;
Map<String, String> options = getExtractOptions(nodeRef, reader, limits);
transformInBackground(nodeRef, reader, MIMETYPE_METADATA_EXTRACT, EXTRACT, options);
return EMPTY_METADATA;
@@ -461,7 +464,7 @@ public class AsynchronousExtractor extends AbstractMappingMetadataExtracter
}
// Remove well know entries from the map that drive how the real metadata is applied.
OverwritePolicy overwritePolicy = removeOverwritePolicy(metadata, "sys:overwritePolicy", OverwritePolicy.PRAGMATIC);
OverwritePolicy overwritePolicy = removeOverwritePolicy(metadata, "sys:overwritePolicy", extractOverwritePolicy);
Boolean enableStringTagging = removeBoolean(metadata, "sys:enableStringTagging", false);
Boolean carryAspectProperties = removeBoolean(metadata, "sys:carryAspectProperties", true);
List<String> stringTaggingSeparators = removeTaggingSeparators(metadata, "sys:stringTaggingSeparators",

View File

@@ -404,6 +404,24 @@ public interface MetadataExtracter extends ContentWorker
return extract(reader, destination);
}
/**
 * Identical to {@link #extract(ContentReader, OverwritePolicy, Map)} but with the addition of the {@code NodeRef} being acted on. By default, the method without the {@code NodeRef} is called.
 *
 * @param nodeRef
 *            the node being acted on.
 * @param reader
 *            the source of the content
 * @param overwritePolicy
 *            the policy stipulating how existing properties may be overwritten
 * @param destination
 *            the map of properties to populate (essentially a return value)
 * @return Returns a map of all properties on the destination map that were added or modified. If the return map is empty, then no properties were modified.
 * @throws ContentIOException
 *             if a detectable error occurs
 */
default Map<QName, Serializable> extract(NodeRef nodeRef, ContentReader reader, OverwritePolicy overwritePolicy, Map<QName, Serializable> destination)
{
    return extract(reader, overwritePolicy, destination);
}
/**
* Identical to {@link #extract(ContentReader, OverwritePolicy, Map, Map)} but with the addition of the {@code NodeRef} being acted on. By default, the method without the {@code NodeRef} is called.
*

View File

@@ -26,10 +26,12 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -243,9 +245,11 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
public void cacheContentDataForNodes(Set<Long> nodeIds)
{
for (ContentDataEntity entity : getContentDataEntitiesForNodes(nodeIds))
List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(nodeIds);
// We may need to add additional protections here
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
contentDataCache.setValue(entity.getId(), makeContentData(entity));
contentDataCache.setValue(contentDataEntity.getId(), makeContentData(contentDataEntity));
}
}
@@ -299,6 +303,41 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentData, Serializable>
{
/**
 * Resolve the database ID (the cache's value key) for the given {@link ContentData}.
 * This is a value-based reverse lookup against <b>alf_content_data</b> and may be
 * costly on large tables.
 *
 * @param value
 *            the content data to resolve; must not be <tt>null</tt>
 * @return the entity ID, or <tt>null</tt> if no matching row exists
 * @throws IllegalArgumentException
 *             if the value is <tt>null</tt>
 */
@Override
public Serializable getValueKey(ContentData value)
{
    if (value == null)
    {
        throw new IllegalArgumentException("ContentData value cannot be null");
    }
    // Use the single-entity lookup directly rather than a singleton-list round trip
    ContentDataEntity contentDataEntity = getContentDataEntity(value);
    return contentDataEntity == null ? null : contentDataEntity.getId();
}
/**
 * Resolve the database IDs (value keys) for the given {@link ContentData} values in bulk.
 *
 * @param values
 *            the content data values to resolve; may be <tt>null</tt> or empty
 * @return the entity IDs of the matching rows; never <tt>null</tt>
 */
@Override
public List<Serializable> getValueKeys(List<ContentData> values)
{
    if (values == null || values.isEmpty())
    {
        return Collections.emptyList();
    }
    List<Serializable> keys = new ArrayList<>();
    for (ContentDataEntity entity : getContentDataEntities(values))
    {
        keys.add(entity.getId());
    }
    return keys;
}
public Pair<Long, ContentData> createValue(ContentData value)
{
value = sanitizeMimetype(value);
@@ -319,6 +358,29 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return new Pair<Long, ContentData>(key, contentData);
}
// Batch resolution of ContentData for the bulk cache path.
public List<Pair<Long, ContentData>> findByKeys(List<Long> keys)
{
    if (keys == null || keys.isEmpty())
    {
        return null;
    }
    // NOTE(review): the keys of this cache are alf_content_data IDs (see
    // cacheContentDataForNodes, which caches by entity.getId()), yet this call
    // delegates to getContentDataEntitiesForNodes(...), which queries by *node*
    // IDs (SELECT_CONTENT_DATA_BY_NODE_IDS). Confirm the intended key space — if
    // these really are content-data IDs, a select-by-content-data-IDs query is
    // needed here instead.
    List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(keys.stream().collect(Collectors.toSet()));
    if (contentDataEntities == null || contentDataEntities.isEmpty())
    {
        return null;
    }
    List<Pair<Long, ContentData>> result = new ArrayList<>(contentDataEntities.size());
    for (ContentDataEntity contentDataEntity : contentDataEntities)
    {
        // Convert each row into the public ContentData form, paired with its ID
        ContentData contentData = makeContentData(contentDataEntity);
        result.add(new Pair<Long, ContentData>(contentDataEntity.getId(), contentData));
    }
    return result;
}
@Override
public int updateValue(Long key, ContentData value)
{
@@ -351,6 +413,28 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return value.getContentUrl();
}
/**
 * Batch lookup of content URL entities by their IDs.
 *
 * @param keys
 *            the <b>alf_content_url</b> IDs to resolve; may be <tt>null</tt> or empty
 * @return pairs of ID to entity, or <tt>null</tt> when nothing was found
 */
@Override
public List<Pair<Long, ContentUrlEntity>> findByKeys(List<Long> keys)
{
    if (keys == null || keys.isEmpty())
    {
        return null;
    }
    List<ContentUrlEntity> entities = getContentUrlEntities(keys);
    if (entities == null || entities.isEmpty())
    {
        return null;
    }
    List<Pair<Long, ContentUrlEntity>> pairs = new ArrayList<>(entities.size());
    for (ContentUrlEntity entity : entities)
    {
        pairs.add(new Pair<Long, ContentUrlEntity>(entity.getId(), entity));
    }
    return pairs;
}
/**
* Looks the entity up based on the ContentURL of the given node
*/
@@ -412,18 +496,60 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
Pair<Long, ContentUrlEntity> entityPair = null;
if (contentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(contentUrlId);
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
entityPair = contentUrlCache.getByKey(contentUrlId);
}
return processContentDataEntity(entityPair, contentDataEntity);
}
/**
 * Translates these instances into externally-usable <code>ContentData</code> instances.
 * <p>
 * Content URL entities are resolved through the cache in a single bulk call and then
 * matched back to their owning <b>alf_content_data</b> rows via a map; the previous
 * implementation re-scanned the entity list for every pair (O(n^2)).
 */
private List<ContentData> makeContentData(List<ContentDataEntity> contentDataEntities)
{
    List<ContentData> contentDataList = new ArrayList<>(contentDataEntities.size());
    // Index the entities by content URL ID for O(1) match-back.
    // putIfAbsent preserves the original "first match wins" semantics on duplicates.
    Map<Long, ContentDataEntity> entitiesByUrlId = new HashMap<>(contentDataEntities.size());
    List<Long> contentUrlIds = new ArrayList<>(contentDataEntities.size());
    for (ContentDataEntity contentDataEntity : contentDataEntities)
    {
        Long contentUrlId = contentDataEntity.getContentUrlId();
        contentUrlIds.add(contentUrlId);
        entitiesByUrlId.putIfAbsent(contentUrlId, contentDataEntity);
    }
    List<Pair<Long, ContentUrlEntity>> entityPairs = contentUrlIds.isEmpty()
            ? Collections.<Pair<Long, ContentUrlEntity>> emptyList()
            : contentUrlCache.getByKeys(contentUrlIds);
    for (Pair<Long, ContentUrlEntity> pair : entityPairs)
    {
        ContentDataEntity contentDataEntity = entitiesByUrlId.get(pair.getFirst());
        contentDataList.add(processContentDataEntity(pair, contentDataEntity));
    }
    return contentDataList;
}
private ContentData processContentDataEntity(Pair<Long, ContentUrlEntity> entityPair, ContentDataEntity contentDataEntity)
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue();
// Decode mimetype
@@ -658,6 +784,13 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract ContentUrlEntity getContentUrlEntity(Long id);
/**
* @param ids
* the IDs of the <b>content urls</b> entities
* @return Return a list of entities or an empty list if there are none
*/
protected abstract List<ContentUrlEntity> getContentUrlEntities(List<Long> ids);
protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);
/**
@@ -703,6 +836,20 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds);
/**
* @param contentData
* the content data
* @return Returns the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentDataEntity getContentDataEntity(ContentData contentData);
/**
* @param contentDataList
* the list of content data
* @return Returns the list of entities or <tt>null</tt> if none exist
*/
protected abstract List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList);
/**
* Update an existing <b>alf_content_data</b> entity
*

View File

@@ -62,6 +62,7 @@ import org.alfresco.util.ParameterCheck;
public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{
private static final String SELECT_CONTENT_URL_BY_ID = "alfresco.content.select_ContentUrlById";
private static final String SELECT_CONTENT_URLS_BY_IDS = "alfresco.content.select_ContentUrlsByIds";
private static final String SELECT_CONTENT_URL_BY_KEY = "alfresco.content.select_ContentUrlByKey";
private static final String SELECT_CONTENT_URL_BY_KEY_UNREFERENCED = "alfresco.content.select_ContentUrlByKeyUnreferenced";
private static final String SELECT_CONTENT_URLS_ORPHANED = "alfresco.content.select.select_ContentUrlsOrphaned";
@@ -69,6 +70,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
private static final String SELECT_CONTENT_DATA_BY_ID = "alfresco.content.select_ContentDataById";
private static final String SELECT_CONTENT_DATA_BY_NODE_AND_QNAME = "alfresco.content.select_ContentDataByNodeAndQName";
private static final String SELECT_CONTENT_DATA_BY_NODE_IDS = "alfresco.content.select_ContentDataByNodeIds";
private static final String SELECT_CONTENT_DATA_BY_CONTENT_DATA = "alfresco.content.select_ContentDataByContentData";
private static final String INSERT_CONTENT_URL = "alfresco.content.insert.insert_ContentUrl";
private static final String INSERT_CONTENT_DATA = "alfresco.content.insert.insert_ContentData";
private static final String UPDATE_CONTENT_URL_ORPHAN_TIME = "alfresco.content.update_ContentUrlOrphanTime";
@@ -132,6 +134,18 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return contentUrlEntity;
}
/**
 * Bulk fetch of <b>alf_content_url</b> entities by ID.
 *
 * @param ids
 *            the content URL IDs; may be <tt>null</tt> or empty
 * @return the matching entities, or an empty list when there is nothing to query
 */
@Override
protected List<ContentUrlEntity> getContentUrlEntities(List<Long> ids)
{
    // Short-circuit: an empty input can never match any rows
    if (ids != null && !ids.isEmpty())
    {
        return template.selectList(SELECT_CONTENT_URLS_BY_IDS, ids);
    }
    return Collections.emptyList();
}
@Override
public ContentUrlEntity getContentUrlEntity(String contentUrl)
{
@@ -269,6 +283,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return template.selectList(SELECT_CONTENT_DATA_BY_NODE_IDS, idsEntity);
}
/**
 * Look up a single <b>alf_content_data</b> row by its ContentData value.
 *
 * @param contentData
 *            the value to match; <tt>null</tt> yields <tt>null</tt>
 * @return the matching entity, or <tt>null</tt> if none exists
 */
@Override
protected ContentDataEntity getContentDataEntity(ContentData contentData)
{
    // A null value cannot match any row
    return (contentData == null)
            ? null
            : template.selectOne(SELECT_CONTENT_DATA_BY_CONTENT_DATA, contentData);
}
/**
 * Bulk lookup of <b>alf_content_data</b> rows by their ContentData values.
 *
 * @param contentDataList
 *            the values to match; may be <tt>null</tt> or empty
 * @return the matching entities, or an empty list when there is nothing to query
 */
@Override
protected List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList)
{
    // An empty input can never produce results
    if (contentDataList == null || contentDataList.isEmpty())
    {
        return Collections.emptyList();
    }
    return template.selectList(SELECT_CONTENT_DATA_BY_CONTENT_DATA, contentDataList);
}
@Override
protected int updateContentDataEntity(ContentDataEntity entity)
{

View File

@@ -25,6 +25,8 @@
*/
package org.alfresco.repo.domain.encoding;
import java.util.List;
import org.springframework.extensions.surf.util.ParameterCheck;
import org.alfresco.repo.cache.SimpleCache;
@@ -109,6 +111,12 @@ public abstract class AbstractEncodingDAOImpl implements EncodingDAO
}
}
/**
 * Batch lookup is not supported for encodings.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
    throw new UnsupportedOperationException("Batch lookup not supported for encodings.");
}
@Override
public Pair<Long, String> findByValue(String encoding)
{

View File

@@ -25,6 +25,7 @@
*/
package org.alfresco.repo.domain.locale;
import java.util.List;
import java.util.Locale;
import org.springframework.dao.DataIntegrityViolationException;
@@ -239,6 +240,12 @@ public abstract class AbstractLocaleDAOImpl implements LocaleDAO
}
}
/**
 * Batch lookup is not supported for locales.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
    throw new UnsupportedOperationException("Batch lookup not supported for locales.");
}
@Override
public Pair<Long, String> findByValue(String localeStr)
{

View File

@@ -43,6 +43,7 @@ import java.util.Stack;
import java.util.TreeSet;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -140,6 +141,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
private UsageDAO usageDAO;
private int cachingThreshold = 10;
private int batchSize = 256;
private boolean forceBatching = false;
private boolean preloadContentData = true;
/**
* Cache for the Store root nodes by StoreRef:<br/>
@@ -410,6 +414,36 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
this.childByNameCache = childByNameCache;
}
/**
 * Set the batch size for bulk node-loading operations.
 *
 * @param batchSize
 *            the number of entities to process per batch (default: 256; configured
 *            via <code>nodes.bulkLoad.batchSize</code>)
 */
public void setBatchSize(int batchSize)
{
    this.batchSize = batchSize;
}
/**
 * Set whether batch loading is always used, even for small sets.
 * <p>
 * NOTE(review): the backing field also appears to be reassigned at runtime by the
 * cache warm-up logic (which clobbers the configured value and is not thread-safe);
 * confirm whether that runtime mutation should use a method-local variable instead.
 *
 * @param forceBatching
 *            <tt>true</tt> to force batching even for small sets (default:
 *            <tt>false</tt>; configured via <code>nodes.bulkLoad.forceBatching</code>)
 */
public void setForceBatching(boolean forceBatching)
{
    this.forceBatching = forceBatching;
}
/**
 * Set whether content data is pre-cached when bulk loading node properties.
 *
 * @param preloadContentData
 *            <tt>true</tt> to preload content data for properties during bulk loads
 *            (default: <tt>true</tt>; configured via
 *            <code>nodes.bulkLoad.preloadContentData</code>)
 */
public void setPreloadContentData(boolean preloadContentData)
{
    this.preloadContentData = preloadContentData;
}
/* Initialize */
public void init()
@@ -831,6 +865,15 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
NodeEntity node = selectStoreRootNode(storeRef);
return node == null ? null : new Pair<StoreRef, Node>(storeRef, node);
}
/**
 * Bulk root node lookup is not supported.
 *
 * @throws UnsupportedOperationException
 *             always; the message includes the requested store references
 */
public List<Pair<StoreRef, Node>> findByKeys(List<StoreRef> storeRefs)
{
    throw new UnsupportedOperationException("Bulk root node lookup not supported: " + storeRefs);
}
}
/* Nodes */
@@ -873,6 +916,37 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
 * Bulk lookup of nodes by ID for the cache's batch path.
 * <p>
 * Duplicate IDs are collapsed before the query, each returned node is locked to
 * prevent accidental modification, and IDs with no matching row are simply omitted
 * from the result.
 *
 * @param nodeIds
 *            list of node ID keys (may contain duplicates, may be <tt>null</tt>)
 * @return pairs of node ID to locked node; never <tt>null</tt>
 */
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
    if (nodeIds == null || nodeIds.isEmpty())
    {
        return new ArrayList<>(0);
    }
    // De-duplicate (and order) the IDs before hitting the database
    SortedSet<Long> uniqueNodeIds = new TreeSet<>(nodeIds);
    List<Pair<Long, Node>> results = new ArrayList<>(uniqueNodeIds.size());
    for (Node node : selectNodesByIds(uniqueNodeIds))
    {
        // Shouldn't be null, but...
        if (node != null)
        {
            // Lock it to prevent 'accidental' modification
            node.lock();
            results.add(new Pair<>(node.getId(), node));
        }
    }
    return results;
}
/**
* @return Returns the Node's NodeRef
*/
@@ -1151,6 +1225,71 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
 * Get node instances regardless of whether they are considered <b>live</b> or <b>deleted</b>
 *
 * @param nodeIds
 *            the node IDs to look for
 * @param liveOnly
 *            <tt>true</tt> to ensure that only <b>live</b> nodes are retrieved
 * @return nodes that will be <b>live</b> if requested. Nodes not found will be ignored.
 */
private List<Node> getNodesNotNull(List<Long> nodeIds, boolean liveOnly)
{
    List<Pair<Long, Node>> pairs = nodesCache.getByKeys(nodeIds);
    if (pairs.isEmpty())
    {
        // The nodes have no entry in the database
        nodesCache.removeByKeys(nodeIds);
        if (isDebugEnabled)
        {
            // Only query the raw rows when the diagnostic output will actually be used
            List<NodeEntity> dbNodes = selectNodesByIds(nodeIds);
            logger.debug(
                    "No node rows exists: \n" +
                            "   IDs:     " + nodeIds + "\n" +
                            "   DB rows: " + dbNodes);
        }
        return Collections.emptyList();
    }
    List<Long> deletedNodeIds = new ArrayList<>();
    List<Node> liveNodes = new ArrayList<>();
    for (Pair<Long, Node> pair : pairs)
    {
        // Check liveOnly first to short-circuit the deleted-type check entirely when
        // deleted nodes are acceptable (the QName is cached after the first lookup anyway)
        if (liveOnly && pair.getSecond().getDeleted(qnameDAO))
        {
            deletedNodeIds.add(pair.getFirst());
        }
        else
        {
            // Keep the live node
            liveNodes.add(pair.getSecond());
        }
    }
    if (!deletedNodeIds.isEmpty())
    {
        // The nodes are not 'live' as was requested; fetch raw rows only for diagnostics
        List<NodeEntity> dbNodes = isDebugEnabled ? selectNodesByIds(deletedNodeIds) : null;
        nodesCache.removeByKeys(deletedNodeIds);
        // Prune dangling assocs for each deleted node; this can be slow for many deletions
        for (Long nodeId : deletedNodeIds)
        {
            pruneDanglingAssocs(nodeId);
            // In the single-node case a transaction retry would be forced; here we can only log
            if (isDebugEnabled)
            {
                logger.debug(
                        "No node rows exists: \n" +
                                "   IDs:     " + nodeId + "\n" +
                                "   DB rows: " + dbNodes);
            }
        }
    }
    return liveNodes;
}
@Override
public QName getNodeType(Long nodeId)
{
@@ -1666,7 +1805,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
public int touchNodes(Long txnId, List<Long> nodeIds)
{
// limit in clause to 1000 node ids
int batchSize = 1000;
var batchSize = 1000;
int touched = 0;
ArrayList<Long> batch = new ArrayList<Long>(batchSize);
@@ -2595,6 +2734,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Map<QName, Serializable>>(nodeVersionKey, Collections.unmodifiableMap(props));
}
/**
 * Batch lookup is not supported for node properties.
 *
 * @throws UnsupportedOperationException
 *             always
 */
public List<Pair<NodeVersionKey, Map<QName, Serializable>>> findByKeys(List<NodeVersionKey> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for node properties.");
}
}
/* Aspects */
@@ -2831,6 +2980,24 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(aspects));
}
/**
 * Update the node aspects cache for several nodes at once. Each incoming aspect set
 * is wrapped to be unmodifiable. IDs that cannot be resolved to a node drop out
 * silently (getNodesNotNull ignores missing nodes).
 */
private void setNodeAspectsCached(Map<Long, Set<QName>> nodeAspects)
{
    List<Long> nodeIds = new ArrayList<>(nodeAspects.keySet());
    // Resolve version keys for the nodes (live or deleted); minimal extra cost since
    // node rows were just loaded by the bulk path
    for (Node node : getNodesNotNull(nodeIds, false))
    {
        NodeVersionKey nodeVersionKey = node.getNodeVersionKey();
        Set<QName> aspects = nodeAspects.get(nodeVersionKey.getNodeId());
        aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(aspects));
    }
}
/**
* Helper method to copy cache values from one key to another
*/
@@ -2882,6 +3049,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Set<QName>>(nodeVersionKey, Collections.unmodifiableSet(nodeAspectQNames));
}
/**
 * Batch lookup is not supported for node aspects.
 *
 * @throws UnsupportedOperationException
 *             always
 */
public List<Pair<NodeVersionKey, Set<QName>>> findByKeys(List<NodeVersionKey> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for node aspects.");
}
}
/* Node assocs */
@@ -4558,12 +4735,11 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
int foundCacheEntryCount = 0;
int missingCacheEntryCount = 0;
boolean forceBatch = false;
List<Long> batchLoadNodeIds = new ArrayList<Long>(nodeIds.size());
for (Long nodeId : nodeIds)
{
if (!forceBatch)
if (!forceBatching)
{
// Is this node in the cache?
if (nodesCache.getValue(nodeId) != null)
@@ -4578,7 +4754,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
if (foundCacheEntryCount + missingCacheEntryCount % 100 == 0)
{
// We force the batch if the number of hits drops below the number of misses
forceBatch = foundCacheEntryCount < missingCacheEntryCount;
forceBatching = foundCacheEntryCount < missingCacheEntryCount;
}
}
@@ -4672,7 +4848,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
StoreEntity store = getStoreNotNull(storeRef);
Long storeId = store.getId();
int batchSize = 256;
SortedSet<String> batch = new TreeSet<String>();
for (String uuid : uuids)
{
@@ -4690,12 +4865,12 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByUuids(storeId, batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
private void cacheNodesBatch(List<Long> nodeIds)
{
int batchSize = 256;
SortedSet<Long> batch = new TreeSet<Long>();
for (Long nodeId : nodeIds)
{
@@ -4713,6 +4888,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByIds(batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
@@ -4749,22 +4925,40 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
Map<NodeVersionKey, Set<QName>> nodeAspects = selectNodeAspects(aspectNodeIds);
Map<Long, Set<QName>> aspectsMappedByNodeId = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
Map<Long, Set<QName>> nodesWithNoAspects = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
for (Map.Entry<NodeVersionKey, Set<QName>> entry : nodeAspects.entrySet())
{
NodeVersionKey nodeVersionKeyFromDb = entry.getKey();
Long nodeId = nodeVersionKeyFromDb.getNodeId();
Set<QName> qnames = entry.getValue();
setNodeAspectsCached(nodeId, qnames);
aspectNodeIds.remove(nodeId);
NodeVersionKey oldKey = entry.getKey();
Long newKey = oldKey.getNodeId();
Set<QName> value = entry.getValue();
aspectsMappedByNodeId.put(newKey, value);
// Remove the nodeIds from the original Set
aspectNodeIds.remove(newKey);
}
if (!aspectsMappedByNodeId.isEmpty())
{
setNodeAspectsCached(aspectsMappedByNodeId);
}
// Cache the absence of aspects too!
for (Long nodeId : aspectNodeIds)
{
setNodeAspectsCached(nodeId, Collections.<QName> emptySet());
nodesWithNoAspects.put(nodeId, Collections.<QName> emptySet());
}
if (!nodesWithNoAspects.isEmpty())
{
setNodeAspectsCached(nodesWithNoAspects);
}
// First ensure all content data are pre-cached, so we don't have to load them individually when converting properties
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
if (preloadContentData && !propertiesNodeIds.isEmpty())
{
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
}
// Now bulk load the properties
Map<NodeVersionKey, Map<NodePropertyKey, NodePropertyValue>> propsByNodeId = selectNodeProperties(propertiesNodeIds);
@@ -4774,7 +4968,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Map<NodePropertyKey, NodePropertyValue> propertyValues = entry.getValue();
Map<QName, Serializable> props = nodePropertyHelper.convertToPublicProperties(propertyValues);
setNodePropertiesCached(nodeId, props);
}
} // Rework the above .... it is not the best approach .... post processing approach is better
}
/**
@@ -4943,6 +5137,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
protected abstract NodeEntity selectNodeById(Long id);
protected abstract List<NodeEntity> selectNodesByIds(List<Long> ids);
protected abstract NodeEntity selectNodeByNodeRef(NodeRef nodeRef);
protected abstract List<Node> selectNodesByUuids(Long storeId, SortedSet<String> uuids);

View File

@@ -419,6 +419,19 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
return template.selectOne(SELECT_NODE_BY_ID, node);
}
/**
 * Bulk select of node rows by ID.
 * <p>
 * The mapped statement expects a list of {@link NodeEntity} templates carrying the
 * IDs, so one placeholder entity is built per requested ID.
 */
@Override
protected List<NodeEntity> selectNodesByIds(List<Long> ids)
{
    List<NodeEntity> params = new ArrayList<>(ids.size());
    for (Long id : ids)
    {
        NodeEntity param = new NodeEntity();
        param.setId(id);
        params.add(param);
    }
    return template.selectList(SELECT_NODES_BY_IDS, params);
}
@Override
protected NodeEntity selectNodeByNodeRef(NodeRef nodeRef)
{

View File

@@ -310,6 +310,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return null;
}
/**
 * Batch value-key resolution is not supported for ACLs.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Serializable> getValueKeys(List<AclEntity> values)
{
    throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> createValue(AclEntity value)
{
AclEntity entity = createAclEntity(value);
@@ -322,6 +328,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
/**
 * Batch lookup is not supported for ACLs.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Pair<Long, AclEntity>> findByKeys(List<Long> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> findByValue(AclEntity value)
{
if ((value != null) && (value.getId() != null))
@@ -816,6 +828,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value;
}
/**
 * Batch value-key resolution is not supported for permissions.
 *
 * @param values
 *            the permission values (renamed from <code>keys</code> for consistency
 *            with the other callback DAOs — the argument holds values, not keys)
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<PermissionEntity> getValueKeys(List<PermissionEntity> values)
{
    throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> createValue(PermissionEntity value)
{
PermissionEntity entity = createPermissionEntity(value);
@@ -828,6 +846,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
/**
 * Batch lookup is not supported for permissions.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Pair<Long, PermissionEntity>> findByKeys(List<Long> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> findByValue(PermissionEntity value)
{
if ((value == null) || (value.getName() == null) || (value.getTypeQNameId() == null))
@@ -999,6 +1023,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value.getAuthority();
}
/**
 * Batch value-key resolution is not supported for authorities.
 *
 * @param values
 *            the authority values (renamed from <code>keys</code> for consistency
 *            with the other callback DAOs — the argument holds values, not keys)
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<String> getValueKeys(List<AuthorityEntity> values)
{
    throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> createValue(AuthorityEntity value)
{
AuthorityEntity entity = createAuthorityEntity(value);
@@ -1011,6 +1041,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
/**
 * Batch lookup is not supported for authorities.
 *
 * @throws UnsupportedOperationException
 *             always
 */
@Override
public List<Pair<Long, AuthorityEntity>> findByKeys(List<Long> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> findByValue(AuthorityEntity value)
{
if ((value == null) || (value.getAuthority() == null))

View File

@@ -373,6 +373,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for property-class entities; always throws.
@Override
public List<Pair<Long, Class<?>>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property classes.");
}
public Pair<Long, Class<?>> findByValue(Class<?> value)
{
PropertyClassEntity entity = findClassByValue(value);
@@ -465,6 +471,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for property date values; always throws.
@Override
public List<Pair<Long, Date>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property date values.");
}
public Pair<Long, Date> findByValue(Date value)
{
PropertyDateValueEntity entity = findDateValueByValue(value);
@@ -566,6 +578,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
}
}
// Batch key lookup is not implemented for property string values; always throws.
@Override
public List<Pair<Long, String>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property string values.");
}
public Pair<Long, String> findByValue(String value)
{
Long key = findStringValueByValue(value);
@@ -658,6 +676,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for property double values; always throws.
@Override
public List<Pair<Long, Double>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property double values.");
}
public Pair<Long, Double> findByValue(Double value)
{
PropertyDoubleValueEntity entity = findDoubleValueByValue(value);
@@ -727,6 +751,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
PropertySerializableValueEntity entity = findSerializableValueById(key);
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for property serializable values;
// always throws.
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property serializable values.");
}
}
protected abstract PropertySerializableValueEntity findSerializableValueById(Long id);
@@ -833,6 +863,11 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for property values; always throws.
// @Override added for consistency: every sibling findByKeys implementation in
// this file carries the annotation, and this one overrides the same callback.
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported for property values.");
}
public Pair<Long, Serializable> findByValue(Serializable value)
{
PropertyValueEntity entity = findPropertyValueByValue(value);
@@ -937,6 +972,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return new Pair<Long, Serializable>(key, value);
}
// Batch key lookup is not implemented for properties; always throws.
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for properties.");
}
/**
* Updates a property. The <b>alf_prop_root</b> entity is updated to ensure concurrent modification is detected.
*

View File

@@ -27,6 +27,7 @@ package org.alfresco.repo.domain.qname;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -191,6 +192,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
// Batch key lookup is not implemented for namespace entries; always throws.
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for namespaces.");
}
@Override
public Pair<Long, String> findByValue(String uri)
{
@@ -351,6 +358,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
// Batch key lookup is not implemented for QName entries; always throws.
@Override
public List<Pair<Long, QName>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for QNames.");
}
@Override
public Pair<Long, QName> findByValue(QName qname)
{

View File

@@ -210,6 +210,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return null;
}
// Batch value-to-key resolution is not implemented for tenant entities;
// always throws.
@Override
public List<Serializable> getValueKeys(List<TenantEntity> values)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> createValue(TenantEntity value)
{
@@ -224,6 +230,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return convertEntityToPair(entity);
}
// Batch key lookup is not implemented for tenant entities; always throws.
@Override
public List<Pair<String, TenantEntity>> findByKeys(List<String> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> findByValue(TenantEntity value)
{

View File

@@ -4,21 +4,21 @@
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
@@ -34,7 +34,7 @@ import org.alfresco.service.cmr.action.ActionService;
/**
* Scripted Action service for describing and executing actions against Nodes.
*
*
* @author davidc
*/
public final class Actions extends BaseScopableProcessorExtension
@@ -44,7 +44,7 @@ public final class Actions extends BaseScopableProcessorExtension
/**
* Set the service registry
*
*
* @param serviceRegistry
* the service registry
*/
@@ -55,7 +55,7 @@ public final class Actions extends BaseScopableProcessorExtension
/**
* Gets the list of registered action names
*
*
* @return the registered action names
*/
public String[] getRegistered()
@@ -73,7 +73,7 @@ public final class Actions extends BaseScopableProcessorExtension
/**
* Create an Action
*
*
* @param actionName
* the action name
* @return the action

View File

@@ -0,0 +1,171 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* Copyright (C) 2005 - 2025 Alfresco Software Limited
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.jscript;
import org.apache.commons.lang3.Strings;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.evaluator.CompareContentConditionEvaluator;
import org.alfresco.repo.forms.FormData;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionCondition;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;
/**
 * JavaScript wrapper for the "extract-metadata" action.
 * <p>
 * This class provides a scriptable interface to trigger metadata extraction actions within the Alfresco repository.<br>
 * It is similar to the {@link Actions} class but is dedicated to metadata extraction functionality.
 *
 * @author Sayan Bhattacharya
 */
public final class MetaDataExtractAction extends BaseScopableProcessorExtension
{
    private static final Log LOG = LogFactory.getLog(MetaDataExtractAction.class);

    /** Name of the repository action wrapped by this script extension. */
    private static final String ACTION_NAME = "extract-metadata";

    private ContentService contentService;
    private ServiceRegistry services;

    /**
     * Set the service registry
     *
     * @param serviceRegistry
     *            the service registry
     */
    public void setServiceRegistry(ServiceRegistry serviceRegistry)
    {
        this.services = serviceRegistry;
    }

    /**
     * Set the content service used to read the node's current content.
     *
     * @param contentService
     *            the content service
     */
    public void setContentService(ContentService contentService)
    {
        this.contentService = contentService;
    }

    /**
     * Create a new metadata extraction action instance, guarded by a
     * compare-content condition.
     *
     * @param isContentChanged
     *            value for the compare-content condition parameter; typically the
     *            result of {@link #isContentChanged(String, FormData)}
     * @return the newly created action wrapped as a {@link ScriptAction}, or
     *         {@code null} if the "extract-metadata" action is not registered
     */
    public ScriptAction create(boolean isContentChanged)
    {
        ScriptAction scriptAction = null;
        ActionService actionService = services.getActionService();
        ActionDefinition actionDef = actionService.getActionDefinition(ACTION_NAME);
        if (actionDef != null)
        {
            Action action = actionService.createAction(ACTION_NAME);
            ActionCondition actionCondition = actionService.createActionCondition(CompareContentConditionEvaluator.NAME);
            actionCondition.setParameterValue(CompareContentConditionEvaluator.PARAM_IS_CONTENT_CHANGED, isContentChanged);
            action.addActionCondition(actionCondition);
            scriptAction = new ScriptAction(this.services, action, actionDef);
            scriptAction.setScope(getScope());
        }
        return scriptAction;
    }

    /**
     * Check if the content has been updated in the form data compared to the existing content of the node.
     * <p>
     * Best-effort check: any failure (unreadable node, missing content, malformed
     * id) is logged at debug level and reported as "not changed".
     *
     * @param itemId
     *            a NodeRef string, or a "storeProtocol/storeId/nodeId" style id
     * @param formData
     *            submitted form data; the "prop_cm_content" field is compared
     * @return true if content has changed, false otherwise
     */
    public boolean isContentChanged(String itemId, FormData formData)
    {
        try
        {
            NodeRef nodeRef = NodeRef.isNodeRef(itemId) ? new NodeRef(itemId) : parseNodeRef(itemId);
            if (nodeRef == null)
            {
                return false;
            }
            // Guard against a missing reader explicitly rather than relying on the
            // catch-all below to swallow the resulting NPE.
            ContentReader reader = contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);
            if (reader == null)
            {
                return false;
            }
            // Validate the form field before reading the node content, so we do not
            // fetch content we will never compare.
            FormData.FieldData fieldData = formData.getFieldData("prop_cm_content");
            if (fieldData == null || fieldData.getValue() == null)
            {
                return false;
            }
            String contentString = reader.getContentString();
            String propCmContent = String.valueOf(fieldData.getValue());
            return !Strings.CS.equals(contentString, propCmContent);
        }
        catch (Exception e)
        {
            // Deliberate best-effort behaviour: report "unchanged" on any error.
            if (LOG.isDebugEnabled())
            {
                LOG.debug("Unable to determine if content has changed for node: " + itemId, e);
            }
            return false;
        }
    }

    /**
     * Parse a "storeProtocol/storeId/nodeId" style id into a {@link NodeRef}.
     *
     * @param itemId
     *            slash-separated id
     * @return the node reference, or {@code null} if the id does not have exactly three segments
     */
    private NodeRef parseNodeRef(String itemId)
    {
        String[] parts = itemId.split("/");
        return (parts.length == 3) ? new NodeRef(parts[0], parts[1], parts[2]) : null;
    }
}

View File

@@ -565,6 +565,12 @@ public class DBQueryEngine implements QueryEngine
return null;
}
// Batch node lookup is not implemented by this callback; always throws.
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
throw new UnsupportedOperationException("Batch lookup not supported for Nodes.");
}
@Override
public NodeRef getValueKey(Node value)
{

View File

@@ -237,6 +237,12 @@
<bean id="no-condition" class="org.alfresco.repo.action.evaluator.NoConditionEvaluator" parent="action-condition-evaluator">
</bean>
<!-- Condition evaluator comparing a node's current content with submitted content.
     NOTE(review): publicCondition=false presumably hides this condition from
     end-user condition pickers - confirm against action-condition-evaluator docs. -->
<bean id="compare-content" class="org.alfresco.repo.action.evaluator.CompareContentConditionEvaluator" parent="action-condition-evaluator">
<property name="publicCondition">
<value>false</value>
</property>
</bean>
<bean id="compare-property-value" class="org.alfresco.repo.action.evaluator.ComparePropertyValueEvaluator" parent="action-condition-evaluator">
<property name="nodeService">
<ref bean="nodeService" />

View File

@@ -140,6 +140,9 @@
<property name="parentAssocsCacheLimitFactor" value="${system.cache.parentAssocs.limitFactor}"/>
<property name="childByNameCache" ref="node.childByNameCache"/>
<property name="cachingThreshold" value="${nodes.bulkLoad.cachingThreshold}"/>
<property name="batchSize" value="${nodes.bulkLoad.batchSize:256}"/>
<property name="forceBatching" value="${nodes.bulkLoad.forceBatching:false}"/>
<property name="preloadContentData" value="${nodes.bulkLoad.preloadContentData:true}"/>
</bean>
<bean id="nodeDAO.org.alfresco.repo.domain.dialect.Dialect" class="org.alfresco.repo.domain.node.ibatis.NodeDAOImpl" parent="nodeDAObase" />

View File

@@ -235,6 +235,31 @@
u.id = #{id}
</select>
<!-- Get the content URL entities by IDs -->
<!-- Bulk variant of the per-id content URL lookup.
     NOTE(review): MyBatis renders the foreach as "u.id in (...)"; an empty id
     collection would produce invalid SQL ("in ()"), so callers must pass a
     non-empty list. -->
<select id="select_ContentUrlsByIds" parameterType="list" resultMap="result_ContentUrl">
select
u.id as id,
u.content_url as content_url,
u.content_url_short as content_url_short,
u.content_url_crc as content_url_crc,
u.content_size as content_size,
u.orphan_time as orphan_time,
ce.algorithm as algorithm,
ce.key_size as key_size,
ce.encrypted_key as encrypted_key,
ce.master_keystore_id as master_keystore_id,
ce.master_key_alias as master_key_alias,
ce.unencrypted_file_size as unencrypted_file_size
from
alf_content_url u
left join alf_content_url_encryption ce on (u.id = ce.content_url_id)
where
u.id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</select>
<!-- Get the content URL entity by unique key -->
<select id="select_ContentUrlByKey" parameterType="ContentUrl" resultMap="result_ContentUrl">
select
@@ -373,6 +398,28 @@
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get ContentData entities by Content Data -->
<!-- Bulk lookup of ContentData rows by the content URLs of the supplied entities.
     NOTE(review): filtering on cu.content_url in the WHERE clause effectively
     turns the LEFT JOIN on alf_content_url into an inner join (rows with a null
     content URL are excluded) - confirm this is intended. The foreach also
     assumes a non-empty collection, and parameterType is "ContentData" while the
     statement iterates a collection named "list" - verify callers pass a list of
     ContentData entities. -->
<select id="select_ContentDataByContentData" parameterType="ContentData" resultMap="result_ContentData">
select
cd.id as id,
cd.version as version,
cd.content_url_id as content_url_id,
cu.content_size as content_size,
cd.content_mimetype_id as content_mimetype_id,
cd.content_encoding_id as content_encoding_id,
cd.content_locale_id as content_locale_id
from
alf_content_data cd
join alf_node_properties np on (cd.id = np.long_value)
left join alf_content_url cu on (cd.content_url_id = cu.id)
where
cu.content_url in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item.contentUrl}
</foreach>
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get the ContentData entity by Node and property QName -->
<select id="select_ContentDataByNodeAndQName" parameterType="Ids" resultType="long">
select

View File

@@ -762,13 +762,15 @@
alf_node node
join alf_node_aspects aspects on (aspects.node_id = node.id)
<where>
<if test="nodeId != null">aspects.node_id = #{nodeId}</if>
<if test="nodeIds != null">
and aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</if>
<choose>
<when test="nodeId != null">aspects.node_id = #{nodeId}</when>
<when test="nodeIds != null">
aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</when>
</choose>
</where>
</select>

View File

@@ -918,6 +918,9 @@ mail.service.corePoolSize=8
mail.service.maximumPoolSize=20
nodes.bulkLoad.cachingThreshold=10
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
# Multi-Tenancy

View File

@@ -104,6 +104,17 @@
</property>
</bean>
<!-- Registers org.alfresco.repo.jscript.MetaDataExtractAction as the JavaScript
     root-scope extension "metadataExtractAction", wired with the public
     ContentService and ServiceRegistry beans. -->
<bean id="metadataExtractServiceScript" parent="baseJavaScriptExtension"
class="org.alfresco.repo.jscript.MetaDataExtractAction">
<property name="extensionName">
<value>metadataExtractAction</value>
</property>
<property name="contentService" ref="ContentService" />
<property name="serviceRegistry">
<ref bean="ServiceRegistry"/>
</property>
</bean>
<bean id="imapScript" parent="baseJavaScriptExtension" class="org.alfresco.repo.jscript.Imap">
<property name="extensionName">
<value>imap</value>

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2017 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -66,6 +66,7 @@ import org.alfresco.util.testing.category.NonBuildTests;
org.alfresco.repo.importer.FileImporterTest.class,
org.alfresco.repo.importer.ImporterComponentTest.class,
org.alfresco.repo.jscript.PeopleTest.class,
org.alfresco.repo.jscript.MetaDataExtractActionTest.class,
org.alfresco.repo.jscript.RhinoScriptTest.class,
// needs a clean DB to run

View File

@@ -28,6 +28,7 @@ package org.alfresco.repo.action.executer;
import static org.awaitility.Awaitility.await;
import java.io.Serializable;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@@ -46,6 +47,8 @@ import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.jscript.MetaDataExtractAction;
import org.alfresco.repo.jscript.ScriptAction;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -74,6 +77,10 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
protected static final String QUICK_CREATOR = "Nevin Nollop";
protected static final String QUICK_UPDATED_TITLE = "The hot dog is eaten by the city fox";
protected static final String QUICK_UPDATED_DESCRIPTION = "Pangram, fox, dog, Gym class featuring only brown fox";
protected static final String QUICK_UPDATED_CREATOR = "Friday";
private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
@@ -84,6 +91,8 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
private ContentMetadataExtracter executer;
private MetaDataExtractAction extractAction;
private final static String ID = GUID.generate();
@Before
@@ -116,6 +125,9 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
// Get the executer instance
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
// get the js script action
this.extractAction = (MetaDataExtractAction) this.applicationContext.getBean("metadataExtractServiceScript");
}
/**
@@ -351,4 +363,45 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
}
});
}
// Verifies the scripted "extract-metadata" action end-to-end: updates the node
// content, runs the action created with isContentChanged=true, then checks the
// extracted metadata properties after the asynchronous extraction completes.
@Test
public void testUsingScriptAction_WhenContentChanged() throws Exception
{
// update the content
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadNamedQuickTestFile("quickupdated.pdf"));
// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();
// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
ScriptAction action = extractAction.create(true);
action.execute(nodeRef, false, false);
return null;
}
});
// Need to wait for the async extract
// Generous 100s ceiling; polling stops as soon as the description property appears.
await().pollInSameThread()
.atMost(Duration.ofSeconds(100))
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);
// Check that the properties have been preserved, but that description has been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
assertEquals(QUICK_UPDATED_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(QUICK_UPDATED_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
assertEquals(QUICK_UPDATED_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
return null;
}
});
}
}

View File

@@ -28,6 +28,8 @@ package org.alfresco.repo.cache.lookup;
import static org.junit.Assert.*;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -332,6 +334,16 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return dbValue;
}
/**
 * Resolve the natural key for each value by delegating to getValueKey per
 * element, preserving input order.
 * <p>
 * {@code @Override} added for consistency: the class implements
 * EntityLookupCallbackDAO and the sibling findByKeys carries the annotation.
 *
 * @param values
 *            the values to resolve; must not be null
 * @return one key per input value, in the same order
 */
@Override
public List<String> getValueKeys(List<Object> values)
{
    List<String> keys = new ArrayList<>(values.size());
    for (Object value : values)
    {
        keys.add(getValueKey(value));
    }
    return keys;
}
public Pair<Long, Object> findByKey(Long key)
{
assertNotNull(key);
@@ -346,6 +358,12 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return new Pair<Long, Object>(key, value);
}
// Batch key lookup is deliberately unsupported in this test DAO; always throws.
// Parameter renamed from "key" to "keys" to reflect that it is a collection
// (internal name only - no caller impact).
@Override
public List<Pair<Long, Object>> findByKeys(List<Long> keys)
{
    throw new UnsupportedOperationException("Batch lookup not supported in test DAO.");
}
public Pair<Long, Object> findByValue(Object value)
{
assertTrue(value == null || value instanceof TestValue);

View File

@@ -0,0 +1,128 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* Copyright (C) 2005 Jesper Steen Møller
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.jscript;
import static org.junit.Assert.*;
import org.junit.Test;
import org.mockito.Mockito;
import org.alfresco.repo.forms.FormData;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionCondition;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
/**
 * Unit tests for {@link MetaDataExtractAction} built purely from Mockito stubs;
 * no Spring context is required.
 */
public class MetaDataExtractActionTest
{
    /** Node reference string shared by the content-comparison tests. */
    private static final String NODE_REF = "workspace://SpacesStore/abc/def";

    @Test
    public void testIsContentChangedReturnsTrue()
    {
        // Node content and submitted form content differ -> expect "changed".
        ContentService contentServiceMock = Mockito.mock(ContentService.class);
        ContentReader readerMock = Mockito.mock(ContentReader.class);
        FormData formDataMock = Mockito.mock(FormData.class);
        FormData.FieldData fieldDataMock = Mockito.mock(FormData.FieldData.class);

        Mockito.when(contentServiceMock.getReader(Mockito.any(), Mockito.any())).thenReturn(readerMock);
        Mockito.when(readerMock.getContentString()).thenReturn("oldContent");
        Mockito.when(formDataMock.getFieldData("prop_cm_content")).thenReturn(fieldDataMock);
        Mockito.when(fieldDataMock.getValue()).thenReturn("newContent");

        MetaDataExtractAction extractAction = new MetaDataExtractAction();
        extractAction.setContentService(contentServiceMock);

        assertTrue(extractAction.isContentChanged(NODE_REF, formDataMock));
    }

    @Test
    public void testIsContentChangedReturnsFalse()
    {
        // Identical content on both sides -> expect "not changed".
        ContentService contentServiceMock = Mockito.mock(ContentService.class);
        ContentReader readerMock = Mockito.mock(ContentReader.class);
        FormData formDataMock = Mockito.mock(FormData.class);
        FormData.FieldData fieldDataMock = Mockito.mock(FormData.FieldData.class);

        Mockito.when(contentServiceMock.getReader(Mockito.any(), Mockito.any())).thenReturn(readerMock);
        Mockito.when(readerMock.getContentString()).thenReturn("sameContent");
        Mockito.when(formDataMock.getFieldData("prop_cm_content")).thenReturn(fieldDataMock);
        Mockito.when(fieldDataMock.getValue()).thenReturn("sameContent");

        MetaDataExtractAction extractAction = new MetaDataExtractAction();
        extractAction.setContentService(contentServiceMock);

        assertFalse(extractAction.isContentChanged(NODE_REF, formDataMock));
    }

    @Test
    public void testCreateWhenContentChangedReturnsScriptAction()
    {
        // A registered action definition must yield a non-null ScriptAction.
        ServiceRegistry serviceRegistryMock = Mockito.mock(ServiceRegistry.class);
        ActionService actionServiceMock = Mockito.mock(ActionService.class);
        ActionDefinition actionDefinitionMock = Mockito.mock(ActionDefinition.class);
        Action repoActionMock = Mockito.mock(Action.class);
        ActionCondition conditionMock = Mockito.mock(ActionCondition.class);

        Mockito.when(serviceRegistryMock.getActionService()).thenReturn(actionServiceMock);
        Mockito.when(actionServiceMock.getActionDefinition(Mockito.anyString())).thenReturn(actionDefinitionMock);
        Mockito.when(actionServiceMock.createAction(Mockito.anyString())).thenReturn(repoActionMock);
        Mockito.when(actionServiceMock.createActionCondition(Mockito.anyString())).thenReturn(conditionMock);

        MetaDataExtractAction extractAction = new MetaDataExtractAction();
        extractAction.setServiceRegistry(serviceRegistryMock);

        ScriptAction result = extractAction.create(true);
        assertNotNull("ScriptAction should not be null when content has changed", result);
    }
}

Binary file not shown.