Compare commits

...

39 Commits

Author SHA1 Message Date
Jared Ottley
d9c56bbc79 [MNT-25404] [LFG] Query Performance - High performance cost in retrieving nodes/node properties for large result sets
- Bulkified queries on the preload for a query.
- Added additional configuration properties around the code changes
- New properties (with their defaults):
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
2025-11-04 21:49:33 -07:00
alfresco-build
a16473100d [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-03 12:46:19 +00:00
alfresco-build
6b2fafac45 [maven-release-plugin][skip ci] prepare release 25.3.0.67 2025-11-03 12:46:17 +00:00
Damian Ujma
3508e17907 ACS-10456 Bump Netty and Camel (#3632) 2025-11-03 12:03:17 +01:00
alfresco-build
b9d0773989 [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-02 00:10:06 +00:00
alfresco-build
df4a70b61e [maven-release-plugin][skip ci] prepare release 25.3.0.66 2025-11-02 00:10:04 +00:00
Alfresco CI User
331464f106 [force] Force release for 2025-11-02. 2025-11-02 00:05:03 +00:00
alfresco-build
d21fdb09b5 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 14:32:25 +00:00
alfresco-build
38a4da7413 [maven-release-plugin][skip ci] prepare release 25.3.0.65 2025-10-30 14:32:23 +00:00
Belal Ansari
920285b209 ACS-10404 bump ATS (#3627) 2025-10-30 18:38:51 +05:30
alfresco-build
4ab8e36170 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 09:16:50 +00:00
alfresco-build
9860cf63ae [maven-release-plugin][skip ci] prepare release 25.3.0.64 2025-10-30 09:16:47 +00:00
Somnath-Deshmukh
a49e0b2ae3 MNT-25422 Bulleted list, numbered list, and underline are not working properly when adding comment in the file. (#3624) 2025-10-30 13:26:32 +05:30
alfresco-build
e28b7b35da [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-28 11:24:54 +00:00
alfresco-build
6baafc0e88 [maven-release-plugin][skip ci] prepare release 25.3.0.63 2025-10-28 11:24:51 +00:00
Somnath-Deshmukh
b1cf78ce7c Fix/mnt 25359 Prevent XSS attack during posting a comment keeping the editor styles intact (#3623)
Fix for MNT-25359 Prevent XSS attack during posting a comment keeping the editor styles intact.
2025-10-28 15:35:34 +05:30
alfresco-build
a2f9857442 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-26 00:10:29 +00:00
alfresco-build
8c69432052 [maven-release-plugin][skip ci] prepare release 25.3.0.62 2025-10-26 00:10:28 +00:00
Alfresco CI User
124f87ee21 [force] Force release for 2025-10-26. 2025-10-26 00:05:04 +00:00
Swarnajit3004
3cd3b2c2d6 [ACS-10547] Added code to handle PATCH request (#3622) 2025-10-24 10:18:34 +05:30
alfresco-build
14da8d2002 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-22 09:51:01 +00:00
alfresco-build
6a4bbb021c [maven-release-plugin][skip ci] prepare release 25.3.0.61 2025-10-22 09:50:59 +00:00
Debjit Chattopadhyay
42d70b17c7 Revert "MNT-24776 adding if-else conditionals to avoid null values"
Revert "MNT-24776 adding if-else conditionals to avoid null values"
2025-10-22 14:33:09 +05:30
Debjit Chattopadhyay
c7eba0ddc8 Revert "MNT-24776 adding if-else conditionals to avoid null values"
This reverts commit be02be5a8b.
2025-10-22 13:08:21 +05:30
alfresco-build
266094c0e1 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-19 00:08:29 +00:00
alfresco-build
e442b4acf0 [maven-release-plugin][skip ci] prepare release 25.3.0.60 2025-10-19 00:08:28 +00:00
Alfresco CI User
fd1028a685 [force] Force release for 2025-10-19. 2025-10-19 00:05:12 +00:00
alfresco-build
0a7e275a9c [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-17 06:19:01 +00:00
alfresco-build
d1bbba7286 [maven-release-plugin][skip ci] prepare release 25.3.0.59 2025-10-17 06:18:59 +00:00
Somnath-Deshmukh
e1baddebee Fix/mnt 25359 (#3613)
Prevent XSS attack during posting a comment
2025-10-17 10:58:02 +05:30
alfresco-build
3263dcaf2f [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-12 00:09:47 +00:00
alfresco-build
8926f7f9a7 [maven-release-plugin][skip ci] prepare release 25.3.0.58 2025-10-12 00:09:45 +00:00
Alfresco CI User
764a1b656c [force] Force release for 2025-10-12. 2025-10-12 00:04:45 +00:00
alfresco-build
cf265f2dea [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-10 08:32:39 +00:00
alfresco-build
fd0d5204eb [maven-release-plugin][skip ci] prepare release 25.3.0.57 2025-10-10 08:32:36 +00:00
Piotr Żurek
f9b8a4b42d Fix missing import 2025-10-10 09:44:45 +02:00
Piotr Żurek
fcdc1438e7 Fix formatting 2025-10-10 09:26:24 +02:00
Axel Faust
7cd1416561 Governance Services: LinkedHashSet for stable reader/writer authorities set order + hash code for in-place group names (#2664) 2025-10-10 08:39:23 +02:00
alfresco-build
f197757f94 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-09 09:23:06 +00:00
47 changed files with 974 additions and 159 deletions

View File

@@ -104,7 +104,7 @@ jobs:
mkdir temp-dir-for-sast
bash ./scripts/ci/remove-sast-exclusions.sh ./packaging/war/target/alfresco.war temp-dir-for-sast/reduced.war
- name: "Run SAST Scan"
uses: veracode/Veracode-pipeline-scan-action@v1.0.16
uses: veracode/Veracode-pipeline-scan-action@v1.0.20
with:
vid: ${{ secrets.VERACODE_API_ID }}
vkey: ${{ secrets.VERACODE_API_KEY }}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -31,6 +31,7 @@ import static org.alfresco.service.cmr.security.PermissionService.GROUP_PREFIX;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -247,7 +248,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
*/
private Set<String> getAuthorities(String group)
{
Set<String> result = new HashSet<>();
Set<String> result = new LinkedHashSet<>();
result.addAll(authorityService.getContainedAuthorities(null, group, true));
return result;
}

View File

@@ -33,6 +33,7 @@ import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -325,8 +326,8 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
return aclReaders;
}
HashSet<String> assigned = new HashSet<>();
HashSet<String> readers = new HashSet<>();
Set<String> assigned = new LinkedHashSet<>();
Set<String> readers = new LinkedHashSet<>();
for (AccessControlEntry ace : acl.getEntries())
{
@@ -412,8 +413,8 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
return aclWriters;
}
HashSet<String> assigned = new HashSet<>();
HashSet<String> readers = new HashSet<>();
Set<String> assigned = new LinkedHashSet<>();
Set<String> readers = new LinkedHashSet<>();
for (AccessControlEntry ace : acl.getEntries())
{
@@ -485,7 +486,7 @@ public class ExtendedPermissionServiceImpl extends PermissionServiceImpl impleme
Set<String> writers = getWriters(aclId);
// add the current owner to the list of extended writers
Set<String> modifiedWrtiers = new HashSet<>(writers);
Set<String> modifiedWrtiers = new LinkedHashSet<>(writers);
String owner = ownableService.getOwner(nodeRef);
if (StringUtils.isNotBlank(owner) &&
!owner.equals(OwnableService.NO_OWNER) &&

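The hunks above swap HashSet for LinkedHashSet so that the reader/writer authority sets keep a stable, insertion-ordered iteration. A minimal standalone sketch of the difference, assuming nothing beyond the JDK (the group names are invented):

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class SetOrderDemo
{
    public static void main(String[] args)
    {
        // Invented authority names, just to show iteration order
        List<String> authorities = List.of("GROUP_writers", "GROUP_readers", "GROUP_EVERYONE");

        // HashSet iteration order depends on hash codes and table capacity: not stable across inputs
        Set<String> unordered = new HashSet<>(authorities);

        // LinkedHashSet iterates in insertion order: stable and repeatable
        Set<String> ordered = new LinkedHashSet<>(authorities);

        System.out.println("HashSet:       " + unordered);
        System.out.println("LinkedHashSet: " + ordered);
    }
}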
View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
</project>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<organization>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -675,6 +675,11 @@ public class RestWrapper extends DSLWrapper<RestWrapper>
{
returnedResponse = onRequest().get(restRequest.getPath(), restRequest.getPathParams()).andReturn();
}
else if (HttpMethod.PATCH.equals(httpMethod))
{
returnedResponse = onRequest().body(restRequest.getBody())
.patch(restRequest.getPath(), restRequest.getPathParams()).andReturn();
}
else
{
returnedResponse = onRequest().get(restRequest.getPath(), restRequest.getPathParams()).andReturn();

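The new branch above routes HttpMethod.PATCH through the same request-building chain as the other verbs. As a rough standalone equivalent, a hedged RestAssured sketch — the base URI, endpoint, body, and node ID are invented for illustration:

import static io.restassured.RestAssured.given;

import io.restassured.response.Response;

public class PatchSketch
{
    public static void main(String[] args)
    {
        // Mirrors onRequest().body(restRequest.getBody()).patch(path, pathParams)
        Response response = given()
                .baseUri("http://localhost:8080") // assumed local instance
                .contentType("application/json")
                .body("{\"title\": \"updated\"}")
                .patch("/alfresco/api/-default-/public/alfresco/versions/1/nodes/{nodeId}", "some-node-id");
        System.out.println(response.getStatusCode());
    }
}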
View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -51,8 +51,8 @@
<dependency.alfresco-server-root.version>7.0.2</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.2.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.2.3-A.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.3-A.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.1</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>1.0.11</dependency.acs-event-model.version>
@@ -86,8 +86,8 @@
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>5.4.0</dependency.poi.version>
<dependency.jboss.logging.version>3.5.0.Final</dependency.jboss.logging.version>
<dependency.camel.version>4.11.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.118.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.camel.version>4.15.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.127.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.6</dependency.activemq.version>
<dependency.apache-compress.version>1.27.1</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.2</dependency.awaitility.version>
@@ -154,7 +154,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>25.3.0.56</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -31,7 +31,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.json.simple.JSONObject;
import org.owasp.html.HtmlPolicyBuilder;
import org.owasp.html.PolicyFactory;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptRequest;
@@ -67,6 +70,29 @@ public class CommentsPost extends AbstractCommentsWebScript
// get json object from request
JSONObject json = parseJSON(req);
// Validating and Sanitizing comment content to prevent XSS
String commentContent = getOrNull(json, "content");
if (StringUtils.isBlank(commentContent))
{
throw new IllegalArgumentException("Comment content must not be empty");
}
else
{
// Allowed HTML elements and attributes in comment content, e.g. text formatting, lists, and structure & styling
String[] allowedElements = {"b", "i", "u", "strong", "em", "ul", "ol", "li", "p", "br", "span", "div"};
PolicyFactory policy = new HtmlPolicyBuilder()
.allowElements(allowedElements)
.allowAttributes("style")
.onElements("span", "div", "p", "ul")
.allowStyling()
.allowStandardUrlProtocols()
.toFactory();
String safeContent = policy.sanitize(commentContent);
json.replace("content", safeContent);
}
/* MNT-10231, MNT-9771 fix */
this.behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_AUDITABLE);

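To see what the policy above actually does at runtime, here is a minimal sketch against the same OWASP java-html-sanitizer API; the input string is invented:

import org.owasp.html.HtmlPolicyBuilder;
import org.owasp.html.PolicyFactory;

public class SanitizeDemo
{
    public static void main(String[] args)
    {
        // Same allow-list as the web script change above
        String[] allowedElements = {"b", "i", "u", "strong", "em", "ul", "ol", "li", "p", "br", "span", "div"};
        PolicyFactory policy = new HtmlPolicyBuilder()
                .allowElements(allowedElements)
                .allowAttributes("style").onElements("span", "div", "p", "ul")
                .allowStyling()
                .allowStandardUrlProtocols()
                .toFactory();

        String unsafe = "<p style=\"color:red\">hello</p><script>alert('xss')</script>";
        // <script> is not on the allow-list and is dropped; the styled <p> survives
        System.out.println(policy.sanitize(unsafe));
    }
}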
View File

@@ -1,92 +1,83 @@
<#macro renderParent node indent=" ">
<#escape x as jsonUtils.encodeJSONString(x)>
${indent}"parent":
${indent}{
<#if (node != rootNode) && node.parent??>
<@renderParent node.parent indent+" " />
</#if>
${indent}"type": "${node.typeShort}",
${indent}"isContainer": ${node.isContainer?string},
${indent}"name": "${node.properties.name!""}",
${indent}"title": "${node.properties.title!""}",
${indent}"description": "${node.properties.description!""}",
<#if node.properties.modified??>${indent}"modified": "${xmldate(node.properties.modified)}",</#if>
<#if node.properties.modifier??>${indent}"modifier": "${node.properties.modifier}",</#if>
${indent}"displayPath": "${node.displayPath!""}",
${indent}"qnamePath": "${node.qnamePath!""}",
<#if node.aspects??>
${indent}"aspects":
${indent}[
<#list node.aspects as aspect>
"${shortQName(aspect)}"
<#if aspect_has_next>,</#if>
</#list>
${indent}],
</#if>
${indent}"nodeRef": "${node.nodeRef}"
${indent}},
</#escape>
</#macro>
<#macro pickerResultsJSON results>
<#escape x as jsonUtils.encodeJSONString(x)>
{
"data":
{
<#if parent??>
<@renderParent parent />
</#if>
"items":
[
<#list results as row>
{
"type": "${row.item.typeShort}",
"parentType": "${row.item.parentTypeShort!""}",
"isContainer": ${row.item.isContainer?string},
<#if row.container??>"container": "${row.container!""}",</#if>
<#if row.item.properties?? && row.item.properties.name??>
"name": "${row.item.properties.name!""}",
<#else>
"name": "${(row.item.name)!row.item?string!""}",
</#if>
<#if row.item.aspects??>
"aspects": [
<#list row.item.aspects as aspect>
"${shortQName(aspect)}"
<#if aspect_has_next>,</#if>
</#list>
],
</#if>
<#if row.item.properties??>
"title":<#if row.item.properties["lnk:title"]??>"${row.item.properties["lnk:title"]}",
<#elseif row.item.properties["ia:whatEvent"]??>"${row.item.properties["ia:whatEvent"]}",
<#else>"${row.item.properties.title!""}",</#if>
"description": "${row.item.properties.description!""}",
<#else>
"title": "${(row.item.name)!row.item?string!""}",
"description": "",
</#if>
<#if row.item.properties.modified??>"modified": "${xmldate(row.item.properties.modified)}",</#if>
<#if row.item.properties.modifier??>"modifier": "${row.item.properties.modifier}",</#if>
<#if row.item.siteShortName??>"site": "${row.item.siteShortName}",</#if>
<#if row.item.properties["ia:fromDate"]??>"fromDate": "${xmldate(row.item.properties["ia:fromDate"])}",</#if>
"displayPath": "${row.item.displayPath!""}",
"qnamePath": "${row.item.qnamePath!""}",
<#if row.item.typeShort != "cm:person" && row.item.typeShort != "cm:authorityContainer">
"userAccess":
{
"create": ${row.item.hasPermission("CreateChildren")?string},
"edit": ${row.item.hasPermission("Write")?string},
"delete": ${row.item.hasPermission("Delete")?string}
},
</#if>
"nodeRef": "${row.item.nodeRef}"<#if row.selectable?exists>,
"selectable" : ${row.selectable?string}</#if>
}<#if row_has_next>,</#if>
</#list>
]
}
}
</#escape>
<#macro renderParent node indent=" ">
<#escape x as jsonUtils.encodeJSONString(x)>
${indent}"parent":
${indent}{
<#if (node != rootNode) && node.parent??>
<@renderParent node.parent indent+" " />
</#if>
${indent}"type": "${node.typeShort}",
${indent}"isContainer": ${node.isContainer?string},
${indent}"name": "${node.properties.name!""}",
${indent}"title": "${node.properties.title!""}",
${indent}"description": "${node.properties.description!""}",
<#if node.properties.modified??>${indent}"modified": "${xmldate(node.properties.modified)}",</#if>
<#if node.properties.modifier??>${indent}"modifier": "${node.properties.modifier}",</#if>
${indent}"displayPath": "${node.displayPath!""}",
${indent}"qnamePath": "${node.qnamePath!""}",
<#if node.aspects??>
${indent}"aspects":
${indent}[
<#list node.aspects as aspect>
"${shortQName(aspect)}"
<#if aspect_has_next>,</#if>
</#list>
${indent}],
</#if>
${indent}"nodeRef": "${node.nodeRef}"
${indent}},
</#escape>
</#macro>
<#macro pickerResultsJSON results>
<#escape x as jsonUtils.encodeJSONString(x)>
{
"data":
{
<#if parent??>
<@renderParent parent />
</#if>
"items":
[
<#list results as row>
{
"type": "${row.item.typeShort}",
"parentType": "${row.item.parentTypeShort!""}",
"isContainer": ${row.item.isContainer?string},
<#if row.container??>"container": "${row.container!""}",</#if>
"name": "${row.item.properties.name!""}",
<#if row.item.aspects??>
"aspects": [
<#list row.item.aspects as aspect>
"${shortQName(aspect)}"
<#if aspect_has_next>,</#if>
</#list>
],
</#if>
"title":<#if row.item.properties["lnk:title"]??>"${row.item.properties["lnk:title"]}",
<#elseif row.item.properties["ia:whatEvent"]??>"${row.item.properties["ia:whatEvent"]}",
<#else>"${row.item.properties.title!""}",</#if>
"description": "${row.item.properties.description!""}",
<#if row.item.properties.modified??>"modified": "${xmldate(row.item.properties.modified)}",</#if>
<#if row.item.properties.modifier??>"modifier": "${row.item.properties.modifier}",</#if>
<#if row.item.siteShortName??>"site": "${row.item.siteShortName}",</#if>
<#if row.item.properties["ia:fromDate"]??>"fromDate": "${xmldate(row.item.properties["ia:fromDate"])}",</#if>
"displayPath": "${row.item.displayPath!""}",
"qnamePath": "${row.item.qnamePath!""}",
<#if row.item.typeShort != "cm:person" && row.item.typeShort != "cm:authorityContainer">
"userAccess":
{
"create": ${row.item.hasPermission("CreateChildren")?string},
"edit": ${row.item.hasPermission("Write")?string},
"delete": ${row.item.hasPermission("Delete")?string}
},
</#if>
"nodeRef": "${row.item.nodeRef}"<#if row.selectable?exists>,
"selectable" : ${row.selectable?string}</#if>
}<#if row_has_next>,</#if>
</#list>
]
}
}
</#escape>
</#macro>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.56</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -27,6 +27,13 @@ package org.alfresco.repo.cache.lookup;
import java.io.Serializable;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.ParameterCheck;
@@ -73,6 +80,19 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
VK1 getValueKey(V1 value);
/**
* Resolve the given values into unique value keys that can be used to find an entity's ID. A return value should be small and efficient; don't return a value if this is not possible.
* <p/>
* Implementations will often return values themselves, provided that the values are both serializable and have good <code>equals</code> and <code>hashCode</code>.
* <p/>
Where no adequate key can be generated for the value, it should not be returned. In this case, the {@link #findByValue(Object) findByValue} method might not even do a search and just return <tt>null</tt> itself; i.e. if it is difficult to look the value up in storage, it is probably difficult to generate a cache key from it, too. In this scenario, the cache will be purely for key-based lookups.
*
* @param values
* full values being keyed (never <tt>null</tt>)
* @return Returns the business keys representing the entities
*/
List<VK1> getValueKeys(List<V1> values);
/**
* Find an entity for a given key.
*
@@ -82,6 +102,15 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
Pair<K1, V1> findByKey(K1 key);
/**
* Find entities for a list of given keys.
*
* @param keys
* the keys (IDs) used to identify the entities (never <tt>null</tt>)
* @return Returns a list of entities or <tt>null</tt> if no entities exist for the IDs
*/
List<Pair<K1, V1>> findByKeys(List<K1> keys);
/**
* Find and entity using the given value key. The <code>equals</code> and <code>hashCode</code> methods of the value object should respect case-sensitivity in the same way that this lookup treats case-sensitivity i.e. if the <code>equals</code> method is <b>case-sensitive</b> then this method should look the entity up using a <b>case-sensitive</b> search.
* <p/>
@@ -177,6 +206,16 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return null;
}
/**
* This implementation does not resolve value keys, consistent with {@link #findByValue(Object)} finding nothing.
*
* @return Returns an empty list, always
*/
public List<VK2> getValueKeys(List<V2> values)
{
return Collections.emptyList();
}
/**
* Disallows the operation.
*
@@ -343,6 +382,98 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return entityPair;
}
/**
* Find the entities associated with the given key list. The {@link EntityLookupCallbackDAO#findByKey(Serializable) entity callback} will be used if necessary.
* <p/>
* It is up to the client code to decide if a returned empty list indicates a concurrency violation or not; the former would normally result in a concurrency-related exception such as {@link ConcurrencyFailureException}.
*
* @param keys
* The entity keys, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns a list of key-value pairs or an empty list if no keys reference any entities
*/
@SuppressWarnings("unchecked")
public List<Pair<K, V>> getByKeys(List<K> keys)
{
if (keys == null || keys.isEmpty())
{
throw new IllegalArgumentException("An entity lookup key list may not be null or empty");
}
// Create a defensive copy and remove any nulls for safety
List<K> filteredKeys = new ArrayList<>(keys.size());
for (K k : keys)
{
if (k != null)
{
filteredKeys.add(k);
}
}
// Handle missing cache
if (cache == null)
{
return entityLookup.findByKeys(filteredKeys);
}
List<Pair<K, V>> results = new ArrayList<>(filteredKeys.size());
Map<K, CacheRegionKey> keysToResolve = new HashMap<>();
for (K key : filteredKeys)
{
CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
// Look in the cache
V value = (V) cache.get(keyCacheKey);
if (value != null)
{
if (value.equals(VALUE_NOT_FOUND))
{
// We checked before.
continue; // negative cache hit: nothing to add, move on to the next key
}
else if (value.equals(VALUE_NULL))
{
results.add(new Pair<K, V>(key, null));
}
else
{
results.add(new Pair<K, V>(key, value));
}
}
else
{
// Need to resolve this key
keysToResolve.put(key, keyCacheKey);
}
}
// Resolve any missing keys
List<Pair<K, V>> entityPairs = entityLookup.findByKeys(new ArrayList<>(keysToResolve.keySet()));
if (entityPairs != null && !entityPairs.isEmpty())
{
for (Pair<K, V> entityPair : entityPairs)
{
V value = entityPair.getSecond();
// Get the value key
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey != null)
{
CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, valueKey);
// The key is good, so we can cache the value
cache.put(valueCacheKey, entityPair.getFirst());
}
cache.put(
new CacheRegionKey(cacheRegion, entityPair.getFirst()),
(value == null ? VALUE_NULL : value));
results.add(entityPair);
}
}
// Done
return results;
}
/**
* Find the entity associated with the given value. The {@link EntityLookupCallbackDAO#findByValue(Object) entity callback} will be used if no entry exists in the cache.
* <p/>
@@ -689,6 +820,22 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Done
}
public void setValues(Map<K, V> keyValues)
{
// Handle missing cache
if (cache == null)
{
return;
}
List<K> keys = keyValues.keySet().stream().collect(Collectors.toList());
// Remove entries for the keys (bidirectional removal removes the old values as well)
// but leave the keys as they will get updated
removeByKeys(keys, false);
}
/**
* Delete the entity associated with the given key. The {@link EntityLookupCallbackDAO#deleteByKey(Serializable)} callback will be used if necessary.
* <p/>
@@ -751,6 +898,20 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
removeByKey(key, true);
}
/**
* Cache-only operation: Remove all cache values associated with the given keys.
*/
public void removeByKeys(Collection<K> keys)
{
// Handle missing cache
if (cache == null)
{
return;
}
removeByKeys(keys, true);
}
/**
* Cache-only operation: Remove all cache values associated with the given key.
*
@@ -778,6 +939,46 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
}
/**
* Cache-only operation: Remove all cache values associated with the given keys.
*
* @param removeKey
<tt>true</tt> to remove the given keys' entries
*/
@SuppressWarnings("unchecked")
private void removeByKeys(Collection<K> keys, boolean removeKey)
{
List<V> values = new ArrayList<>();
for (K key : keys)
{
CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
V value = (V) cache.get(keyCacheKey);
if (value != null && !value.equals(VALUE_NOT_FOUND))
{
values.add(value);
}
if (removeKey)
{
cache.remove(keyCacheKey);
}
}
if (!values.isEmpty())
{
// Get the value key and remove it
List<VK> valueKeys = entityLookup.getValueKeys(values);
if (valueKeys != null && !valueKeys.isEmpty())
{
for (VK vk : valueKeys)
{
CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, vk);
cache.remove(valueCacheKey);
}
}
}
}
/**
* Cache-only operation: Remove all cache values associated with the given value
*

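getByKeys above is a batch read-through cache: serve what is already cached, collect the misses, resolve them in one backend call, then back-fill the cache. A simplified standalone sketch of the same pattern, with a HashMap standing in for the Alfresco cache and a function standing in for entityLookup.findByKeys:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class BatchReadThroughSketch
{
    private final Map<Long, String> cache = new HashMap<>();
    private final Function<List<Long>, Map<Long, String>> backend; // stands in for the DAO callback

    public BatchReadThroughSketch(Function<List<Long>, Map<Long, String>> backend)
    {
        this.backend = backend;
    }

    public Map<Long, String> getByKeys(List<Long> keys)
    {
        Map<Long, String> results = new HashMap<>();
        List<Long> misses = new ArrayList<>();
        for (Long key : keys)
        {
            String cached = cache.get(key);
            if (cached != null)
            {
                results.put(key, cached); // cache hit
            }
            else
            {
                misses.add(key); // resolve later, in one batch
            }
        }
        if (!misses.isEmpty())
        {
            Map<Long, String> loaded = backend.apply(misses);
            cache.putAll(loaded); // back-fill so the next call hits
            results.putAll(loaded);
        }
        return results;
    }

    public static void main(String[] args)
    {
        BatchReadThroughSketch dao = new BatchReadThroughSketch(missing -> {
            Map<Long, String> found = new HashMap<>(); // fake backend lookup
            for (Long id : missing)
            {
                found.put(id, "entity-" + id);
            }
            return found;
        });
        System.out.println(dao.getByKeys(List.of(1L, 2L, 3L))); // all misses: one backend call
        System.out.println(dao.getByKeys(List.of(2L, 3L, 4L))); // 2 and 3 now hit the cache
    }
}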
View File

@@ -26,10 +26,12 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -243,9 +245,11 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
public void cacheContentDataForNodes(Set<Long> nodeIds)
{
for (ContentDataEntity entity : getContentDataEntitiesForNodes(nodeIds))
List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(nodeIds);
// We may need to add additional protections here
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
contentDataCache.setValue(entity.getId(), makeContentData(entity));
contentDataCache.setValue(contentDataEntity.getId(), makeContentData(contentDataEntity));
}
}
@@ -299,6 +303,41 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentData, Serializable>
{
@Override
public Serializable getValueKey(ContentData value)
{
if (value == null)
{
throw new IllegalArgumentException("ContentData value cannot be null");
}
// It is a gross hack for now, but we need to find the entity based on the value
ContentDataEntity contentDataEntity = getContentDataEntities(Collections.singletonList(value)).stream().findFirst().orElse(null);
if (contentDataEntity == null)
{
return null;
}
return contentDataEntity.getId();
}
@Override
public List<Serializable> getValueKeys(List<ContentData> values)
{
if (values == null || values.isEmpty())
{
return Collections.emptyList();
}
List<ContentDataEntity> contentDataEntities = getContentDataEntities(values);
List<Serializable> result = new ArrayList<>(contentDataEntities.size());
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
result.add(contentDataEntity.getId());
}
return result;
}
public Pair<Long, ContentData> createValue(ContentData value)
{
value = sanitizeMimetype(value);
@@ -319,6 +358,29 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return new Pair<Long, ContentData>(key, contentData);
}
public List<Pair<Long, ContentData>> findByKeys(List<Long> keys)
{
if (keys == null || keys.isEmpty())
{
return null;
}
List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(keys.stream().collect(Collectors.toSet()));
if (contentDataEntities == null || contentDataEntities.isEmpty())
{
return null;
}
List<Pair<Long, ContentData>> result = new ArrayList<>(contentDataEntities.size());
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
ContentData contentData = makeContentData(contentDataEntity);
result.add(new Pair<Long, ContentData>(contentDataEntity.getId(), contentData));
}
return result;
}
@Override
public int updateValue(Long key, ContentData value)
{
@@ -351,6 +413,28 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return value.getContentUrl();
}
@Override
public List<Pair<Long, ContentUrlEntity>> findByKeys(List<Long> keys)
{
if (keys == null || keys.isEmpty())
{
return null;
}
List<ContentUrlEntity> contentUrlEntities = getContentUrlEntities(keys);
if (contentUrlEntities == null || contentUrlEntities.isEmpty())
{
return null;
}
List<Pair<Long, ContentUrlEntity>> result = new ArrayList<>(contentUrlEntities.size());
for (ContentUrlEntity contentUrlEntity : contentUrlEntities)
{
result.add(new Pair<Long, ContentUrlEntity>(contentUrlEntity.getId(), contentUrlEntity));
}
return result;
}
/**
* Looks the entity up based on the ContentURL of the given node
*/
@@ -412,18 +496,60 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
Pair<Long, ContentUrlEntity> entityPair = null;
if (contentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(contentUrlId);
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
entityPair = contentUrlCache.getByKey(contentUrlId);
}
return processContentDataEntity(entityPair, contentDataEntity);
}
/**
* Translates these entities into externally-usable <code>ContentData</code> instances.
*/
private List<ContentData> makeContentData(List<ContentDataEntity> contentDataEntities)
{
List<ContentData> contentDataList = new ArrayList<>(contentDataEntities.size());
List<Long> contentUrlIds = new ArrayList<>();
List<Pair<Long, ContentUrlEntity>> entityPairs = new ArrayList<>(contentDataEntities.size());
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
// Decode content URL
contentUrlIds.add(contentDataEntity.getContentUrlId());
}
if (!contentUrlIds.isEmpty())
{
entityPairs = contentUrlCache.getByKeys(contentUrlIds);
}
for (Pair<Long, ContentUrlEntity> pair : entityPairs)
{
ContentDataEntity contentDataEntity = contentDataEntities.stream()
.filter(cde -> cde.getContentUrlId().equals(pair.getFirst()))
.findFirst()
.orElse(null);
ContentData contentData = processContentDataEntity(pair, contentDataEntity);
contentDataList.add(contentData);
}
return contentDataList;
}
private ContentData processContentDataEntity(Pair<Long, ContentUrlEntity> entityPair, ContentDataEntity contentDataEntity)
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue();
// Decode mimetype
@@ -658,6 +784,13 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract ContentUrlEntity getContentUrlEntity(Long id);
/**
* @param ids
* the IDs of the <b>content url</b> entities
* @return Returns a list of entities or an empty list if there are none
*/
protected abstract List<ContentUrlEntity> getContentUrlEntities(List<Long> ids);
protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);
/**
@@ -703,6 +836,20 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds);
/**
* @param contentData
* the content data
* @return Returns the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentDataEntity getContentDataEntity(ContentData contentData);
/**
* @param contentDataList
* the list of content data
* @return Returns the list of entities or <tt>null</tt> if none exist
*/
protected abstract List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList);
/**
* Update an existing <b>alf_content_data</b> entity
*

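In makeContentData(List) above, each returned pair is matched back to its ContentDataEntity by filtering the whole list per element, which is quadratic in the batch size. A generic sketch of the usual alternative, indexing one side by ID first (the record type and values are invented):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class JoinByIdSketch
{
    record Entity(Long id, String payload) {}

    public static void main(String[] args)
    {
        List<Entity> left = List.of(new Entity(1L, "a"), new Entity(2L, "b"));
        List<Entity> right = List.of(new Entity(2L, "B"), new Entity(1L, "A"));

        // Index one side by ID once, then join in O(n) instead of filtering per element
        Map<Long, Entity> rightById = right.stream()
                .collect(Collectors.toMap(Entity::id, Function.identity()));
        for (Entity e : left)
        {
            System.out.println(e.payload() + " <-> " + rightById.get(e.id()).payload());
        }
    }
}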
View File

@@ -62,6 +62,7 @@ import org.alfresco.util.ParameterCheck;
public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{
private static final String SELECT_CONTENT_URL_BY_ID = "alfresco.content.select_ContentUrlById";
private static final String SELECT_CONTENT_URLS_BY_IDS = "alfresco.content.select_ContentUrlsByIds";
private static final String SELECT_CONTENT_URL_BY_KEY = "alfresco.content.select_ContentUrlByKey";
private static final String SELECT_CONTENT_URL_BY_KEY_UNREFERENCED = "alfresco.content.select_ContentUrlByKeyUnreferenced";
private static final String SELECT_CONTENT_URLS_ORPHANED = "alfresco.content.select.select_ContentUrlsOrphaned";
@@ -69,6 +70,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
private static final String SELECT_CONTENT_DATA_BY_ID = "alfresco.content.select_ContentDataById";
private static final String SELECT_CONTENT_DATA_BY_NODE_AND_QNAME = "alfresco.content.select_ContentDataByNodeAndQName";
private static final String SELECT_CONTENT_DATA_BY_NODE_IDS = "alfresco.content.select_ContentDataByNodeIds";
private static final String SELECT_CONTENT_DATA_BY_CONTENT_DATA = "alfresco.content.select_ContentDataByContentData";
private static final String INSERT_CONTENT_URL = "alfresco.content.insert.insert_ContentUrl";
private static final String INSERT_CONTENT_DATA = "alfresco.content.insert.insert_ContentData";
private static final String UPDATE_CONTENT_URL_ORPHAN_TIME = "alfresco.content.update_ContentUrlOrphanTime";
@@ -132,6 +134,18 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return contentUrlEntity;
}
@Override
protected List<ContentUrlEntity> getContentUrlEntities(List<Long> ids)
{
if (ids == null || ids.isEmpty())
{
return Collections.emptyList();
}
List<ContentUrlEntity> contentUrlEntities = template.selectList(SELECT_CONTENT_URLS_BY_IDS, ids);
// Done
return contentUrlEntities;
}
@Override
public ContentUrlEntity getContentUrlEntity(String contentUrl)
{
@@ -269,6 +283,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return template.selectList(SELECT_CONTENT_DATA_BY_NODE_IDS, idsEntity);
}
@Override
protected ContentDataEntity getContentDataEntity(ContentData contentData)
{
if (contentData == null)
{
return null;
}
return template.selectOne(SELECT_CONTENT_DATA_BY_CONTENT_DATA, contentData);
// Done
}
@Override
protected List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList)
{
if (contentDataList != null && !contentDataList.isEmpty())
{
return template.selectList(SELECT_CONTENT_DATA_BY_CONTENT_DATA, contentDataList);
}
// There will be no results
return Collections.emptyList();
}
@Override
protected int updateContentDataEntity(ContentDataEntity entity)
{

View File

@@ -25,6 +25,8 @@
*/
package org.alfresco.repo.domain.encoding;
import java.util.List;
import org.springframework.extensions.surf.util.ParameterCheck;
import org.alfresco.repo.cache.SimpleCache;
@@ -109,6 +111,12 @@ public abstract class AbstractEncodingDAOImpl implements EncodingDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for encodings.");
}
@Override
public Pair<Long, String> findByValue(String encoding)
{

View File

@@ -25,6 +25,7 @@
*/
package org.alfresco.repo.domain.locale;
import java.util.List;
import java.util.Locale;
import org.springframework.dao.DataIntegrityViolationException;
@@ -239,6 +240,12 @@ public abstract class AbstractLocaleDAOImpl implements LocaleDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for locales.");
}
@Override
public Pair<Long, String> findByValue(String localeStr)
{

View File

@@ -43,6 +43,7 @@ import java.util.Stack;
import java.util.TreeSet;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -140,6 +141,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
private UsageDAO usageDAO;
private int cachingThreshold = 10;
private int batchSize = 256;
private boolean forceBatching = false;
private boolean preloadContentData = true;
/**
* Cache for the Store root nodes by StoreRef:<br/>
@@ -410,6 +414,36 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
this.childByNameCache = childByNameCache;
}
/**
* Set the batch size for batch operations
*
* @param batchSize
*/
public void setBatchSize(int batchSize)
{
this.batchSize = batchSize;
}
/**
* Set whether to force batching even for small sets
*
* @param forceBatching
*/
public void setForceBatching(boolean forceBatching)
{
this.forceBatching = forceBatching;
}
/**
* Set whether to preload content data for properties when bulk loading properties
*
* @param preloadContentData
*/
public void setPreloadContentData(boolean preloadContentData)
{
this.preloadContentData = preloadContentData;
}
/* Initialize */
public void init()
@@ -831,6 +865,15 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
NodeEntity node = selectStoreRootNode(storeRef);
return node == null ? null : new Pair<StoreRef, Node>(storeRef, node);
}
/**
* @throws UnsupportedOperationException
* Bulk root node lookup not supported
*/
public List<Pair<StoreRef, Node>> findByKeys(List<StoreRef> storeRefs)
{
throw new UnsupportedOperationException("Bulk root node lookup not supported: " + storeRefs);
}
}
/* Nodes */
@@ -873,6 +916,37 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
* @param nodeIds
the list of node IDs to look up
*/
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
if (nodeIds == null || nodeIds.size() == 0)
{
return new ArrayList<Pair<Long, Node>>(0);
}
List<Pair<Long, Node>> results = new ArrayList<Pair<Long, Node>>(nodeIds.size());
SortedSet<Long> uniqueNodeIds = new TreeSet<Long>(nodeIds);
List<Node> nodes = selectNodesByIds(uniqueNodeIds);
for (Node node : nodes)
{
// Shouldn't be null, but...
if (node != null)
{
// Lock it to prevent 'accidental' modification
node.lock();
results.add(new Pair<Long, Node>(node.getId(), node));
}
}
return results;
}
/**
* @return Returns the Node's NodeRef
*/
@@ -1151,6 +1225,71 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
* Get node instances regardless of whether they are considered <b>live</b> or <b>deleted</b>
*
* @param nodeIds
* the node IDs to look for
* @param liveOnly
* <tt>true</tt> to ensure that only <b>live</b> nodes are retrieved
* @return nodes that will be <b>live</b> if requested. Nodes not found will be ignored.
*/
private List<Node> getNodesNotNull(List<Long> nodeIds, boolean liveOnly)
{
List<Pair<Long, Node>> pairs = nodesCache.getByKeys(nodeIds);
if (pairs.isEmpty())
{
// The nodes have no entry in the database
List<NodeEntity> dbNodes = selectNodesByIds(nodeIds);
nodesCache.removeByKeys(nodeIds);
logger.debug(
"No node rows exists: \n" +
" IDs: " + nodeIds + "\n" +
" DB rows: " + dbNodes);
return Collections.emptyList();
}
List<Long> deletedNodeIds = new ArrayList<>();
List<Node> liveNodes = new ArrayList<>();
for (Pair<Long, Node> pair : pairs)
{
// This might initially seem less performant, but after the first iteration the qname will be cached if it is not already there
if (pair.getSecond().getDeleted(qnameDAO) && liveOnly)
{
deletedNodeIds.add(pair.getFirst());
}
else
{
// Keep the live node
liveNodes.add(pair.getSecond());
}
}
if (!deletedNodeIds.isEmpty())
{
// The node is not 'live' as was requested
List<NodeEntity> dbNode = selectNodesByIds(deletedNodeIds);
nodesCache.removeByKeys(deletedNodeIds);
// Now the pain of pruning dangling assocs for each deleted node...this could be slow if there are many deleted nodes
for (Long nodeId : deletedNodeIds)
{
pruneDanglingAssocs(nodeId);
// In the single node case we would force a retry on the transaction...we can't do that here so just log it
if (isDebugEnabled)
{
logger.debug(
"No node rows exists: \n" +
" IDs: " + nodeId + "\n" +
" DB rows: " + dbNode);
}
}
}
return liveNodes;
}
@Override
public QName getNodeType(Long nodeId)
{
@@ -1666,7 +1805,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
public int touchNodes(Long txnId, List<Long> nodeIds)
{
// limit in clause to 1000 node ids
int batchSize = 1000;
var batchSize = 1000;
int touched = 0;
ArrayList<Long> batch = new ArrayList<Long>(batchSize);
@@ -2595,6 +2734,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Map<QName, Serializable>>(nodeVersionKey, Collections.unmodifiableMap(props));
}
/**
* Batch lookup is not supported
*
* @throws UnsupportedOperationException
*/
public List<Pair<NodeVersionKey, Map<QName, Serializable>>> findByKeys(List<NodeVersionKey> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for node properties.");
}
}
/* Aspects */
@@ -2831,6 +2980,24 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(aspects));
}
/**
* Update the node aspects cache for multiple nodes. Each incoming set will be wrapped to be unmodifiable.
*/
private void setNodeAspectsCached(Map<Long, Set<QName>> nodeAspects)
{
List<Long> nodeIds = nodeAspects.keySet().stream().toList();
List<NodeVersionKey> nodeVersionKeys = getNodesNotNull(nodeIds, false).stream()
.map(Node::getNodeVersionKey)
.collect(Collectors.toList());
// Should have minimal impact
for (NodeVersionKey nodeVersionKey : nodeVersionKeys)
{
aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(nodeAspects.get(nodeVersionKey.getNodeId())));
}
}
/**
* Helper method to copy cache values from one key to another
*/
@@ -2882,6 +3049,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Set<QName>>(nodeVersionKey, Collections.unmodifiableSet(nodeAspectQNames));
}
/**
* Batch lookup is not supported
*
* @throws UnsupportedOperationException
*/
public List<Pair<NodeVersionKey, Set<QName>>> findByKeys(List<NodeVersionKey> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for node aspects.");
}
}
/* Node assocs */
@@ -4558,12 +4735,11 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
int foundCacheEntryCount = 0;
int missingCacheEntryCount = 0;
boolean forceBatch = false;
List<Long> batchLoadNodeIds = new ArrayList<Long>(nodeIds.size());
for (Long nodeId : nodeIds)
{
if (!forceBatch)
if (!forceBatching)
{
// Is this node in the cache?
if (nodesCache.getValue(nodeId) != null)
@@ -4578,7 +4754,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
if ((foundCacheEntryCount + missingCacheEntryCount) % 100 == 0)
{
// We force the batch if the number of hits drops below the number of misses
forceBatch = foundCacheEntryCount < missingCacheEntryCount;
forceBatching = foundCacheEntryCount < missingCacheEntryCount;
}
}
@@ -4672,7 +4848,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
StoreEntity store = getStoreNotNull(storeRef);
Long storeId = store.getId();
int batchSize = 256;
SortedSet<String> batch = new TreeSet<String>();
for (String uuid : uuids)
{
@@ -4690,12 +4865,12 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByUuids(storeId, batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
private void cacheNodesBatch(List<Long> nodeIds)
{
int batchSize = 256;
SortedSet<Long> batch = new TreeSet<Long>();
for (Long nodeId : nodeIds)
{
@@ -4713,6 +4888,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByIds(batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
@@ -4749,22 +4925,40 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
Map<NodeVersionKey, Set<QName>> nodeAspects = selectNodeAspects(aspectNodeIds);
Map<Long, Set<QName>> aspectsMappedByNodeId = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
Map<Long, Set<QName>> nodesWithNoAspects = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
for (Map.Entry<NodeVersionKey, Set<QName>> entry : nodeAspects.entrySet())
{
NodeVersionKey nodeVersionKeyFromDb = entry.getKey();
Long nodeId = nodeVersionKeyFromDb.getNodeId();
Set<QName> qnames = entry.getValue();
setNodeAspectsCached(nodeId, qnames);
aspectNodeIds.remove(nodeId);
NodeVersionKey oldKey = entry.getKey();
Long newKey = oldKey.getNodeId();
Set<QName> value = entry.getValue();
aspectsMappedByNodeId.put(newKey, value);
// Remove the nodeIds from the original Set
aspectNodeIds.remove(newKey);
}
if (!aspectsMappedByNodeId.isEmpty())
{
setNodeAspectsCached(aspectsMappedByNodeId);
}
// Cache the absence of aspects too!
for (Long nodeId : aspectNodeIds)
{
setNodeAspectsCached(nodeId, Collections.<QName> emptySet());
nodesWithNoAspects.put(nodeId, Collections.<QName> emptySet());
}
if (!nodesWithNoAspects.isEmpty())
{
setNodeAspectsCached(nodesWithNoAspects);
}
// First ensure all content data are pre-cached, so we don't have to load them individually when converting properties
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
if (preloadContentData && !propertiesNodeIds.isEmpty())
{
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
}
// Now bulk load the properties
Map<NodeVersionKey, Map<NodePropertyKey, NodePropertyValue>> propsByNodeId = selectNodeProperties(propertiesNodeIds);
@@ -4774,7 +4968,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Map<NodePropertyKey, NodePropertyValue> propertyValues = entry.getValue();
Map<QName, Serializable> props = nodePropertyHelper.convertToPublicProperties(propertyValues);
setNodePropertiesCached(nodeId, props);
}
} // TODO: rework the above; it is not the best approach. A post-processing approach would be better.
}
/**
@@ -4943,6 +5137,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
protected abstract NodeEntity selectNodeById(Long id);
protected abstract List<NodeEntity> selectNodesByIds(List<Long> ids);
protected abstract NodeEntity selectNodeByNodeRef(NodeRef nodeRef);
protected abstract List<Node> selectNodesByUuids(Long storeId, SortedSet<String> uuids);

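The hunks above remove the hard-coded int batchSize = 256 in both caching methods in favour of the configurable nodes.bulkLoad.batchSize field. The drain-into-sorted-batches loop they share, reduced to a standalone sketch:

import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.function.Consumer;

public class BatchPartitionSketch
{
    // Drain IDs into sorted batches of at most batchSize and hand each batch to the loader
    public static void inBatches(List<Long> ids, int batchSize, Consumer<SortedSet<Long>> loader)
    {
        SortedSet<Long> batch = new TreeSet<>();
        for (Long id : ids)
        {
            batch.add(id);
            if (batch.size() >= batchSize)
            {
                loader.accept(batch); // full batch: load it and start a new one
                batch = new TreeSet<>();
            }
        }
        if (!batch.isEmpty())
        {
            loader.accept(batch); // trailing partial batch
        }
    }

    public static void main(String[] args)
    {
        inBatches(List.of(5L, 1L, 9L, 3L, 7L), 2, b -> System.out.println("load " + b));
    }
}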
View File

@@ -419,6 +419,19 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
return template.selectOne(SELECT_NODE_BY_ID, node);
}
@Override
protected List<NodeEntity> selectNodesByIds(List<Long> ids)
{
List<NodeEntity> nodes = new ArrayList<>();
ids.forEach(id -> {
NodeEntity node = new NodeEntity();
node.setId(id);
nodes.add(node);
});
return template.selectList(SELECT_NODES_BY_IDS, nodes);
}
@Override
protected NodeEntity selectNodeByNodeRef(NodeRef nodeRef)
{

View File

@@ -310,6 +310,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return null;
}
@Override
public List<Serializable> getValueKeys(List<AclEntity> values)
{
throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> createValue(AclEntity value)
{
AclEntity entity = createAclEntity(value);
@@ -322,6 +328,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, AclEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> findByValue(AclEntity value)
{
if ((value != null) && (value.getId() != null))
@@ -816,6 +828,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value;
}
@Override
public List<PermissionEntity> getValueKeys(List<PermissionEntity> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> createValue(PermissionEntity value)
{
PermissionEntity entity = createPermissionEntity(value);
@@ -828,6 +846,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, PermissionEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> findByValue(PermissionEntity value)
{
if ((value == null) || (value.getName() == null) || (value.getTypeQNameId() == null))
@@ -999,6 +1023,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value.getAuthority();
}
@Override
public List<String> getValueKeys(List<AuthorityEntity> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> createValue(AuthorityEntity value)
{
AuthorityEntity entity = createAuthorityEntity(value);
@@ -1011,6 +1041,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, AuthorityEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> findByValue(AuthorityEntity value)
{
if ((value == null) || (value.getAuthority() == null))

View File

@@ -373,6 +373,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Class<?>>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property classes.");
}
public Pair<Long, Class<?>> findByValue(Class<?> value)
{
PropertyClassEntity entity = findClassByValue(value);
@@ -465,6 +471,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Date>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property date values.");
}
public Pair<Long, Date> findByValue(Date value)
{
PropertyDateValueEntity entity = findDateValueByValue(value);
@@ -566,6 +578,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property string values.");
}
public Pair<Long, String> findByValue(String value)
{
Long key = findStringValueByValue(value);
@@ -658,6 +676,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Double>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property double values.");
}
public Pair<Long, Double> findByValue(Double value)
{
PropertyDoubleValueEntity entity = findDoubleValueByValue(value);
@@ -727,6 +751,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
PropertySerializableValueEntity entity = findSerializableValueById(key);
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property serializable values.");
}
}
protected abstract PropertySerializableValueEntity findSerializableValueById(Long id);
@@ -833,6 +863,11 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property values.");
}
public Pair<Long, Serializable> findByValue(Serializable value)
{
PropertyValueEntity entity = findPropertyValueByValue(value);
@@ -937,6 +972,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return new Pair<Long, Serializable>(key, value);
}
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for properties.");
}
/**
* Updates a property. The <b>alf_prop_root</b> entity is updated to ensure concurrent modification is detected.
*

View File
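Every property-value callback above throws UnsupportedOperationException, so a batching caller has to degrade to per-key lookups. A hedged sketch of that fallback, assuming the interface sketched earlier (the class and method names below are hypothetical, not Alfresco API):

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.util.Pair;

public final class BatchLookupSupport
{
    private BatchLookupSupport()
    {}

    public static <K extends Serializable, V, VK extends Serializable> List<Pair<K, V>> lookupAll(
            EntityLookupCallbackDAO<K, V, VK> dao, List<K> keys)
    {
        try
        {
            return dao.findByKeys(keys); // fast path: one bulk query
        }
        catch (UnsupportedOperationException e)
        {
            List<Pair<K, V>> results = new ArrayList<>(keys.size());
            for (K key : keys)
            {
                results.add(dao.findByKey(key)); // slow path: one query per key
            }
            return results;
        }
    }
}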

@@ -27,6 +27,7 @@ package org.alfresco.repo.domain.qname;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -191,6 +192,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for namespaces.");
}
@Override
public Pair<Long, String> findByValue(String uri)
{
@@ -351,6 +358,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
@Override
public List<Pair<Long, QName>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for QNames.");
}
@Override
public Pair<Long, QName> findByValue(QName qname)
{

View File

@@ -210,6 +210,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return null;
}
@Override
public List<Serializable> getValueKeys(List<TenantEntity> values)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> createValue(TenantEntity value)
{
@@ -224,6 +230,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<String, TenantEntity>> findByKeys(List<String> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> findByValue(TenantEntity value)
{

View File

@@ -565,6 +565,12 @@ public class DBQueryEngine implements QueryEngine
return null;
}
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
throw new UnsupportedOperationException("Batch lookup not supported for Nodes.");
}
@Override
public NodeRef getValueKey(Node value)
{

View File

@@ -140,6 +140,9 @@
<property name="parentAssocsCacheLimitFactor" value="${system.cache.parentAssocs.limitFactor}"/>
<property name="childByNameCache" ref="node.childByNameCache"/>
<property name="cachingThreshold" value="${nodes.bulkLoad.cachingThreshold}"/>
<property name="batchSize" value="${nodes.bulkLoad.batchSize:256}"/>
<property name="forceBatching" value="${nodes.bulkLoad.forceBatching:false}"/>
<property name="preloadContentData" value="${nodes.bulkLoad.preloadContentData:true}"/>
</bean>
<bean id="nodeDAO.org.alfresco.repo.domain.dialect.Dialect" class="org.alfresco.repo.domain.node.ibatis.NodeDAOImpl" parent="nodeDAObase" />

View File
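The ${name:default} placeholders above use Spring's default-value syntax, so a repository.properties that predates the new keys still resolves to the shipped defaults. The equivalent resolution in plain Java, assuming a loaded Properties object (illustrative only, not how Alfresco wires it):

import java.util.Properties;

public class BulkLoadDefaults
{
    public static void main(String[] args)
    {
        Properties props = new Properties(); // e.g. loaded from repository.properties
        int batchSize = Integer.parseInt(props.getProperty("nodes.bulkLoad.batchSize", "256"));
        boolean forceBatching = Boolean.parseBoolean(props.getProperty("nodes.bulkLoad.forceBatching", "false"));
        boolean preloadContentData = Boolean.parseBoolean(props.getProperty("nodes.bulkLoad.preloadContentData", "true"));
        System.out.println(batchSize + ", " + forceBatching + ", " + preloadContentData);
    }
}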

@@ -235,6 +235,31 @@
u.id = #{id}
</select>
<!-- Get the content URL entities by IDs -->
<select id="select_ContentUrlsByIds" parameterType="list" resultMap="result_ContentUrl">
select
u.id as id,
u.content_url as content_url,
u.content_url_short as content_url_short,
u.content_url_crc as content_url_crc,
u.content_size as content_size,
u.orphan_time as orphan_time,
ce.algorithm as algorithm,
ce.key_size as key_size,
ce.encrypted_key as encrypted_key,
ce.master_keystore_id as master_keystore_id,
ce.master_key_alias as master_key_alias,
ce.unencrypted_file_size as unencrypted_file_size
from
alf_content_url u
left join alf_content_url_encryption ce on (u.id = ce.content_url_id)
where
u.id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</select>
<!-- Get the content URL entity by unique key -->
<select id="select_ContentUrlByKey" parameterType="ContentUrl" resultMap="result_ContentUrl">
select
@@ -373,6 +398,28 @@
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get ContentData entities by Content Data -->
<select id="select_ContentDataByContentData" parameterType="ContentData" resultMap="result_ContentData">
select
cd.id as id,
cd.version as version,
cd.content_url_id as content_url_id,
cu.content_size as content_size,
cd.content_mimetype_id as content_mimetype_id,
cd.content_encoding_id as content_encoding_id,
cd.content_locale_id as content_locale_id
from
alf_content_data cd
join alf_node_properties np on (cd.id = np.long_value)
left join alf_content_url cu on (cd.content_url_id = cu.id)
where
cu.content_url in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item.contentUrl}
</foreach>
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get the ContentData entity by Node and property QName -->
<select id="select_ContentDataByNodeAndQName" parameterType="Ids" resultType="long">
select

View File
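The <foreach> elements above expand the passed list into a SQL IN clause, one bind parameter per element, so many rows come back in a single round trip. A hedged sketch of invoking the batched statement through a MyBatis SqlSession (the alfresco.content namespace, ContentUrlEntity result type, and surrounding DAO plumbing are assumptions; the statement id matches the diff):

import java.util.List;
import org.apache.ibatis.session.SqlSession;

// Sketch only: MyBatis binds a bare List parameter under the name "list",
// which is what collection="list" in the <foreach> above iterates.
List<ContentUrlEntity> loadContentUrls(SqlSession sqlSession, List<Long> ids)
{
    return sqlSession.selectList("alfresco.content.select_ContentUrlsByIds", ids);
}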

@@ -762,13 +762,15 @@
alf_node node
join alf_node_aspects aspects on (aspects.node_id = node.id)
<where>
<if test="nodeId != null">aspects.node_id = #{nodeId}</if>
<if test="nodeIds != null">
and aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</if>
<choose>
<when test="nodeId != null">aspects.node_id = #{nodeId}</when>
<when test="nodeIds != null">
aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</when>
</choose>
</where>
</select>

View File
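Replacing the two independent <if> tests with a <choose> makes the filters mutually exclusive: nodeId wins when both parameters are set, and the nodeIds branch no longer needs the leading "and" that the old version relied on <where> to trim. Illustrative WHERE fragments for each branch (plain strings, not generated output):

// Exactly one branch of the <choose> above is emitted:
String byNodeId = "WHERE aspects.node_id = ?"; // nodeId != null
String byNodeIds = "WHERE aspects.node_id IN (?, ?, ?)"; // nodeIds != null, three ids
// The previous <if> pair could emit both predicates ANDed together
// when both parameters were non-null.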

@@ -918,6 +918,9 @@ mail.service.corePoolSize=8
mail.service.maximumPoolSize=20
nodes.bulkLoad.cachingThreshold=10
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
# Multi-Tenancy

View File
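With nodes.bulkLoad.batchSize=256, a bulk load of n ids would issue roughly ceil(n/256) IN-clause queries instead of n single-row queries. A minimal sketch of that partitioning, assuming the loader chunks its id list before querying (the helper below is hypothetical, not Alfresco API):

import java.util.ArrayList;
import java.util.List;

final class BatchPartition
{
    // Split ids into IN-clause sized chunks, e.g. 1000 ids with
    // batchSize 256 -> 4 queries of 256, 256, 256 and 232 ids.
    static <T> List<List<T>> partition(List<T> ids, int batchSize)
    {
        List<List<T>> batches = new ArrayList<>();
        for (int i = 0; i < ids.size(); i += batchSize)
        {
            batches.add(new ArrayList<>(ids.subList(i, Math.min(i + batchSize, ids.size()))));
        }
        return batches;
    }
}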

@@ -28,6 +28,8 @@ package org.alfresco.repo.cache.lookup;
import static org.junit.Assert.*;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -332,6 +334,16 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return dbValue;
}
@Override
public List<String> getValueKeys(List<Object> values)
{
List<String> keys = new ArrayList<>(values.size());
for (Object value : values)
{
keys.add(getValueKey(value));
}
return keys;
}
public Pair<Long, Object> findByKey(Long key)
{
assertNotNull(key);
@@ -346,6 +358,12 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return new Pair<Long, Object>(key, value);
}
@Override
public List<Pair<Long, Object>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported in test DAO.");
}
public Pair<Long, Object> findByValue(Object value)
{
assertTrue(value == null || value instanceof TestValue);