Compare commits

..

13 Commits

Author SHA1 Message Date
Jared Ottley
d9c56bbc79 [MNT-25404] [LFG] Query Performance - High performance cost in retrieving nodes/node properties for large result sets
- Bulkified queries on the preload for a query.
- Added additional configuration properties around the code changes
- New Properties are (with the defaults):
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
2025-11-04 21:49:33 -07:00
alfresco-build
a16473100d [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-03 12:46:19 +00:00
alfresco-build
6b2fafac45 [maven-release-plugin][skip ci] prepare release 25.3.0.67 2025-11-03 12:46:17 +00:00
Damian Ujma
3508e17907 ACS-10456 Bump Netty and Camel (#3632) 2025-11-03 12:03:17 +01:00
alfresco-build
b9d0773989 [maven-release-plugin][skip ci] prepare for next development iteration 2025-11-02 00:10:06 +00:00
alfresco-build
df4a70b61e [maven-release-plugin][skip ci] prepare release 25.3.0.66 2025-11-02 00:10:04 +00:00
Alfresco CI User
331464f106 [force] Force release for 2025-11-02. 2025-11-02 00:05:03 +00:00
alfresco-build
d21fdb09b5 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 14:32:25 +00:00
alfresco-build
38a4da7413 [maven-release-plugin][skip ci] prepare release 25.3.0.65 2025-10-30 14:32:23 +00:00
Belal Ansari
920285b209 ACS-10404 bump ATS (#3627) 2025-10-30 18:38:51 +05:30
alfresco-build
4ab8e36170 [maven-release-plugin][skip ci] prepare for next development iteration 2025-10-30 09:16:50 +00:00
alfresco-build
9860cf63ae [maven-release-plugin][skip ci] prepare release 25.3.0.64 2025-10-30 09:16:47 +00:00
Somnath-Deshmukh
a49e0b2ae3 MNT-25422 Bulleted list, numbered list, and underline are not working properly when adding comment in the file. (#3624) 2025-10-30 13:26:32 +05:30
46 changed files with 863 additions and 124 deletions

View File

@@ -15,7 +15,6 @@ on:
workflow_dispatch:
env:
JAVA_VERSION: '21'
DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
GITHUB_ACTIONS_DEPLOY_TIMEOUT: 60
@@ -45,8 +44,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v8.24.1
- name: "Init"
run: bash ./scripts/ci/init.sh
@@ -68,8 +65,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v8.24.1
@@ -93,8 +88,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- uses: Alfresco/alfresco-build-tools/.github/actions/github-download-file@v8.24.1
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
@@ -151,8 +144,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- uses: Alfresco/ya-pmd-scan@v4.3.0
with:
classpath-build-command: "mvn test-compile -ntp -Pags -pl \"-:alfresco-community-repo-docker\""
@@ -186,8 +177,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run tests"
@@ -225,8 +214,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -262,8 +249,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: Run MariaDB ${{ matrix.version }} database
@@ -291,8 +276,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.11 database"
@@ -320,8 +303,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MySQL 8 database"
@@ -348,8 +329,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.15 database"
@@ -376,8 +355,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 15.10 database"
@@ -404,8 +381,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 16.6 database"
@@ -430,8 +405,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run ActiveMQ"
@@ -488,8 +461,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Set transformers tag"
@@ -560,8 +531,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -601,8 +570,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 16.6 database"
@@ -633,8 +600,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -667,8 +632,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -697,8 +660,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -745,8 +706,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |

View File

@@ -7,7 +7,6 @@ on:
- release/**
env:
JAVA_VERSION: '21'
GIT_USERNAME: ${{ secrets.BOT_GITHUB_USERNAME }}
GIT_EMAIL: ${{ secrets.BOT_GITHUB_EMAIL }}
GIT_PASSWORD: ${{ secrets.BOT_GITHUB_TOKEN }}
@@ -38,8 +37,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.24.1
@@ -69,8 +66,6 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.24.1
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.24.1
with:
java-version: ${{ env.JAVA_VERSION }}
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.24.1

View File

@@ -133,7 +133,7 @@
"filename": ".github/workflows/master_release.yml",
"hashed_secret": "3e26d6750975d678acb8fa35a0f69237881576b0",
"is_verified": false,
"line_number": 25,
"line_number": 24,
"is_secret": false
}
],

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
</project>

View File

@@ -1,5 +1,5 @@
# More infos about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
FROM alfresco/alfresco-base-tomcat:tomcat10-jre21-rockylinux9@sha256:ed568167f4c28efc9db4c5bc44a882ee117c475463b526b21ada99e1b6d568dd
FROM alfresco/alfresco-base-tomcat:tomcat10-jre17-rockylinux9@sha256:00d89fb84bda7bb37c17b0117adb2cfe4f7cbddcd6c1e42b0a67ea8dbb41a734
# Set default docker_context.
ARG resource_path=target

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<organization>
@@ -16,11 +16,11 @@
</organization>
<properties>
<maven.build.sourceVersion>21</maven.build.sourceVersion>
<maven.build.sourceVersion>17</maven.build.sourceVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven-jar-plugin.version>3.1.1</maven-jar-plugin.version>
<maven-release.version>2.5.3</maven-release.version>
<java.version>21</java.version>
<java.version>17</java.version>
<suiteXmlFile>${project.basedir}/src/test/resources/cmis-suite.xml</suiteXmlFile>
<cmis.binding />
<cmis.basePath />

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -8,18 +8,18 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>
<suiteXmlFile>${project.basedir}/src/test/resources/restapi-suite.xml</suiteXmlFile>
<maven.build.sourceVersion>21</maven.build.sourceVersion>
<maven.build.sourceVersion>17</maven.build.sourceVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<rest.api.explorer.branch>master</rest.api.explorer.branch>
<httpclient-osgi-version>4.5.6</httpclient-osgi-version>
<commons-lang3.version>3.18.0</commons-lang3.version>
<scribejava-apis.version>8.3.3</scribejava-apis.version>
<java.version>21</java.version>
<java.version>17</java.version>
</properties>
<profiles>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<properties>

12
pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -38,7 +38,7 @@
<builder.name>entitled-builder</builder.name>
<local.registry>127.0.0.1:5000</local.registry>
<java.version>21</java.version>
<java.version>17</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<maven.build.sourceVersion>${java.version}</maven.build.sourceVersion>
@@ -51,8 +51,8 @@
<dependency.alfresco-server-root.version>7.0.2</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.2.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.2.3-A.2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.3-A.2</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.1</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>1.0.11</dependency.acs-event-model.version>
@@ -86,8 +86,8 @@
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>5.4.0</dependency.poi.version>
<dependency.jboss.logging.version>3.5.0.Final</dependency.jboss.logging.version>
<dependency.camel.version>4.11.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.118.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.camel.version>4.15.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.127.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.6</dependency.activemq.version>
<dependency.apache-compress.version>1.27.1</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.2</dependency.awaitility.version>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -83,15 +83,9 @@ public class CommentsPost extends AbstractCommentsWebScript
PolicyFactory policy = new HtmlPolicyBuilder()
.allowElements(allowedElements)
.allowAttributes("style").matching((elementName, attributeName, value) -> {
String lowerValue = value.toLowerCase();
if (lowerValue.matches("(?s).*(color\\s*:\\s*[^;]+).*") ||
lowerValue.matches("(?s).*(background-color\\s*:\\s*[^;]+).*"))
{
return value;
}
return null;
}).onElements("span", "div", "p")
.allowAttributes("style")
.onElements("span", "div", "p", "ul")
.allowStyling()
.allowStandardUrlProtocols()
.toFactory();

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.3.0.64-SNAPSHOT</version>
<version>25.3.0.68-SNAPSHOT</version>
</parent>
<dependencies>
@@ -840,12 +840,12 @@
</execution>
</executions>
<configuration>
<complianceLevel>${java.version}</complianceLevel>
<complianceLevel>17</complianceLevel>
<outxml>false</outxml>
<verbose>true</verbose>
<showWeaveInfo>true</showWeaveInfo>
<source>${java.version}</source>
<target>${java.version}</target>
<source>17</source>
<target>17</target>
<additionalCompilerArgs>
<arg>-parameters</arg>
</additionalCompilerArgs>

View File

@@ -27,6 +27,13 @@ package org.alfresco.repo.cache.lookup;
import java.io.Serializable;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.ParameterCheck;
@@ -73,6 +80,19 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
VK1 getValueKey(V1 value);
/**
* Resolve the given values into unique value keys that can be used to find an entity's ID. A return value should be small and efficient; don't return a value if this is not possible.
* <p/>
* Implementations will often return values themselves, provided that the values are both serializable and have good <code>equals</code> and <code>hashCode</code>.
* <p/>
* Where no adequate key can be generated for the value, then it should not be returned. In this case, the {@link #findByValue(Object) findByValue} method might not even do a search and just return <tt>null</tt> or nothing itself i.e. if it is difficult to look the value up in storage then it is probably difficult to generate a cache key from it, too. In this scenario, the cache will be purely for key-based lookups
*
* @param values
* full values being keyed (never <tt>null</tt>)
* @return Returns the business keys representing the entities
*/
List<VK1> getValueKeys(List<V1> values);
/**
* Find an entity for a given key.
*
@@ -82,6 +102,15 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
*/
Pair<K1, V1> findByKey(K1 key);
/**
* Find entities for a list of given keys.
*
* @param keys
* the keys (IDs) used to identify the entity (never <tt>null</tt>)
* @return Return a list of entities or <tt>null</tt> if no entities exists for the IDs
*/
List<Pair<K1, V1>> findByKeys(List<K1> keys);
/**
* Find an entity using the given value key. The <code>equals</code> and <code>hashCode</code> methods of the value object should respect case-sensitivity in the same way that this lookup treats case-sensitivity i.e. if the <code>equals</code> method is <b>case-sensitive</b> then this method should look the entity up using a <b>case-sensitive</b> search.
* <p/>
@@ -177,6 +206,16 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return null;
}
/**
* This implementation does not find values and is backed by {@link #findByValue(Object)} returning nothing.
*
* @return Returns empty list always
*/
public List<VK2> getValueKeys(List<V2> values)
{
return Collections.emptyList();
}
/**
* Disallows the operation.
*
@@ -343,6 +382,98 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return entityPair;
}
/**
* Find the entities associated with the given key list. The {@link EntityLookupCallbackDAO#findByKey(Serializable) entity callback} will be used if necessary.
* <p/>
* It is up to the client code to decide if a returned empty list indicates a concurrency violation or not; the former would normally result in a concurrency-related exception such as {@link ConcurrencyFailureException}.
*
* @param keys
* The entity keys, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns a list of key-value pairs or an empty list if no keys reference any entities
*/
@SuppressWarnings("unchecked")
public List<Pair<K, V>> getByKeys(List<K> keys)
{
if (keys == null || keys.isEmpty())
{
throw new IllegalArgumentException("An entity lookup key list may not be null or empty");
}
// Create a defensive copy and remove any nulls for safety
List<K> filteredKeys = new ArrayList<>(keys.size());
for (K k : keys)
{
if (k != null)
{
filteredKeys.add(k);
}
}
// Handle missing cache
if (cache == null)
{
return entityLookup.findByKeys(filteredKeys);
}
List<Pair<K, V>> results = new ArrayList<>(filteredKeys.size());
Map<K, CacheRegionKey> keysToResolve = new HashMap<>();
for (K key : filteredKeys)
{
CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
// Look in the cache
V value = (V) cache.get(keyCacheKey);
if (value != null)
{
if (value.equals(VALUE_NOT_FOUND))
{
// We checked before.
continue; // not costly...making it clear that we are moving to the next key
}
else if (value.equals(VALUE_NULL))
{
results.add(new Pair<K, V>(key, null));
}
else
{
results.add(new Pair<K, V>(key, value));
}
}
else
{
// Need to resolve this key
keysToResolve.put(key, keyCacheKey);
}
}
// Resolve any missing keys
List<Pair<K, V>> entityPairs = entityLookup.findByKeys(new ArrayList<>(keysToResolve.keySet()));
if (entityPairs != null && !entityPairs.isEmpty())
{
for (Pair<K, V> entityPair : entityPairs)
{
V value = entityPair.getSecond();
// Get the value key
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey != null)
{
CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, valueKey);
// The key is good, so we can cache the value
cache.put(valueCacheKey, entityPair.getFirst());
}
cache.put(
new CacheRegionKey(cacheRegion, entityPair.getFirst()),
(value == null ? VALUE_NULL : value));
results.add(entityPair);
}
}
// Done
return results;
}
/**
* Find the entity associated with the given value. The {@link EntityLookupCallbackDAO#findByValue(Object) entity callback} will be used if no entry exists in the cache.
* <p/>
@@ -689,6 +820,22 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Done
}
public void setValues(Map<K, V> keyValues)
{
// Handle missing cache
if (cache == null)
{
return;
}
List<K> keys = keyValues.keySet().stream().collect(Collectors.toList());
// Remove entries for the keys (bidirectional removal removes the old values as well)
// but leave the keys as they will get updated
removeByKeys(keys, false);
}
/**
* Delete the entity associated with the given key. The {@link EntityLookupCallbackDAO#deleteByKey(Serializable)} callback will be used if necessary.
* <p/>
@@ -751,6 +898,20 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
removeByKey(key, true);
}
/**
* Cache-only operation: Remove all cache values associated with the given keys.
*/
public void removeByKeys(Collection<K> keys)
{
// Handle missing cache
if (cache == null)
{
return;
}
removeByKeys(keys, true);
}
/**
* Cache-only operation: Remove all cache values associated with the given key.
*
@@ -778,6 +939,46 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
}
/**
* Cache-only operation: Remove all cache values associated with the given keys.
*
* @param removeKey
* <tt>true</tt> to remove the given keys' entry
*/
@SuppressWarnings("unchecked")
private void removeByKeys(Collection<K> keys, boolean removeKey)
{
List<V> values = new ArrayList<>();
for (K key : keys)
{
CacheRegionKey keyCacheKey = new CacheRegionKey(cacheRegion, key);
V value = (V) cache.get(keyCacheKey);
if (value != null && !value.equals(VALUE_NOT_FOUND))
{
values.add(value);
}
if (removeKey)
{
cache.remove(keyCacheKey);
}
}
if (!values.isEmpty())
{
// Get the value key and remove it
List<VK> valueKeys = entityLookup.getValueKeys(values);
if (valueKeys != null && !valueKeys.isEmpty())
{
for (VK vk : valueKeys)
{
CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, vk);
cache.remove(valueCacheKey);
}
}
}
}
/**
* Cache-only operation: Remove all cache values associated with the given value
*

View File

@@ -26,10 +26,12 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -243,9 +245,11 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
public void cacheContentDataForNodes(Set<Long> nodeIds)
{
for (ContentDataEntity entity : getContentDataEntitiesForNodes(nodeIds))
List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(nodeIds);
// We may need to add additional protections here
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
contentDataCache.setValue(entity.getId(), makeContentData(entity));
contentDataCache.setValue(contentDataEntity.getId(), makeContentData(contentDataEntity));
}
}
@@ -299,6 +303,41 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentData, Serializable>
{
@Override
public Serializable getValueKey(ContentData value)
{
if (value == null)
{
throw new IllegalArgumentException("ContentData value cannot be null");
}
// It is a gross hack for now, but we need to find the entity based on the value
ContentDataEntity contentDataEntity = getContentDataEntities(Collections.singletonList(value)).stream().findFirst().orElse(null);
if (contentDataEntity == null)
{
return null;
}
return contentDataEntity.getId();
}
@Override
public List<Serializable> getValueKeys(List<ContentData> values)
{
if (values == null || values.isEmpty())
{
return Collections.emptyList();
}
List<ContentDataEntity> contentDataEntities = getContentDataEntities(values);
List<Serializable> result = new ArrayList<>(contentDataEntities.size());
for (ContentDataEntity contentDataEntity : contentDataEntities)
{
result.add(contentDataEntity.getId());
}
return result;
}
public Pair<Long, ContentData> createValue(ContentData value)
{
value = sanitizeMimetype(value);
@@ -319,6 +358,29 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return new Pair<Long, ContentData>(key, contentData);
}
/**
 * Bulk key lookup for the ContentData cache.
 * <p>
 * NOTE(review): the keys of this cache are <b>content data</b> IDs, but this
 * implementation delegates to a query that takes <b>node</b> IDs
 * ({@code getContentDataEntitiesForNodes}) -- confirm the statement resolves
 * the intended rows.
 *
 * @param keys the cache keys to look up; may be <tt>null</tt> or empty
 * @return pairs of (ID, ContentData) for the rows found, or <tt>null</tt> when
 *         the input is empty or nothing matched (per the callback contract)
 */
@Override
public List<Pair<Long, ContentData>> findByKeys(List<Long> keys)
{
    if (keys == null || keys.isEmpty())
    {
        return null;
    }
    // De-duplicate the keys before hitting the database
    List<ContentDataEntity> contentDataEntities = getContentDataEntitiesForNodes(new HashSet<>(keys));
    if (contentDataEntities == null || contentDataEntities.isEmpty())
    {
        return null;
    }
    List<Pair<Long, ContentData>> result = new ArrayList<>(contentDataEntities.size());
    for (ContentDataEntity contentDataEntity : contentDataEntities)
    {
        ContentData contentData = makeContentData(contentDataEntity);
        result.add(new Pair<>(contentDataEntity.getId(), contentData));
    }
    return result;
}
@Override
public int updateValue(Long key, ContentData value)
{
@@ -351,6 +413,28 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return value.getContentUrl();
}
/**
 * Batch lookup: fetches all requested content URL entities in one query and
 * pairs each entity with its database ID for the cache.
 *
 * @param keys the content URL IDs; may be <tt>null</tt> or empty
 * @return pairs of (ID, entity) for the rows found, or <tt>null</tt> when the
 *         input is empty or nothing matched (per the callback contract)
 */
@Override
public List<Pair<Long, ContentUrlEntity>> findByKeys(List<Long> keys)
{
    if (keys == null || keys.isEmpty())
    {
        return null;
    }
    List<ContentUrlEntity> entities = getContentUrlEntities(keys);
    if (entities == null || entities.isEmpty())
    {
        return null;
    }
    List<Pair<Long, ContentUrlEntity>> pairs = new ArrayList<>(entities.size());
    for (ContentUrlEntity entity : entities)
    {
        pairs.add(new Pair<>(entity.getId(), entity));
    }
    return pairs;
}
/**
* Looks the entity up based on the ContentURL of the given node
*/
@@ -412,18 +496,60 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
Pair<Long, ContentUrlEntity> entityPair = null;
if (contentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(contentUrlId);
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
entityPair = contentUrlCache.getByKey(contentUrlId);
}
return processContentDataEntity(entityPair, contentDataEntity);
}
/**
* Translates these instances into an externally-usable <code>ContentData</code> instances.
*/
/**
 * Translates these instances into externally-usable <code>ContentData</code> instances.
 * <p>
 * Bulk variant: the content URL entities for the whole batch are fetched from the
 * cache in a single call and indexed by ID, instead of a linear scan per entity
 * (the previous nested-stream lookup was O(n^2) and threw an NPE for entities
 * with a <tt>null</tt> content URL ID).
 * <p>
 * NOTE(review): as before, entities whose content URL pair is absent from the
 * cache result are silently skipped; the output order now follows the input
 * entity order -- confirm callers expect this.
 */
private List<ContentData> makeContentData(List<ContentDataEntity> contentDataEntities)
{
    List<ContentData> contentDataList = new ArrayList<>(contentDataEntities.size());
    // Collect the content URL IDs to resolve; entities may legitimately have none
    List<Long> contentUrlIds = new ArrayList<>(contentDataEntities.size());
    for (ContentDataEntity contentDataEntity : contentDataEntities)
    {
        Long contentUrlId = contentDataEntity.getContentUrlId();
        if (contentUrlId != null)
        {
            contentUrlIds.add(contentUrlId);
        }
    }
    // Bulk-fetch the URL entities and index them by ID for O(1) lookup
    Map<Long, Pair<Long, ContentUrlEntity>> entityPairsById = new HashMap<>(contentUrlIds.size());
    if (!contentUrlIds.isEmpty())
    {
        for (Pair<Long, ContentUrlEntity> entityPair : contentUrlCache.getByKeys(contentUrlIds))
        {
            entityPairsById.put(entityPair.getFirst(), entityPair);
        }
    }
    for (ContentDataEntity contentDataEntity : contentDataEntities)
    {
        Long contentUrlId = contentDataEntity.getContentUrlId();
        Pair<Long, ContentUrlEntity> entityPair = (contentUrlId == null) ? null : entityPairsById.get(contentUrlId);
        if (entityPair != null)
        {
            contentDataList.add(processContentDataEntity(entityPair, contentDataEntity));
        }
    }
    return contentDataList;
}
private ContentData processContentDataEntity(Pair<Long, ContentUrlEntity> entityPair, ContentDataEntity contentDataEntity)
{
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue();
// Decode mimetype
@@ -658,6 +784,13 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract ContentUrlEntity getContentUrlEntity(Long id);
/**
* @param ids
* the IDs of the <b>content urls</b> entities
* @return Return a list of entities or an empty list if there are none
*/
protected abstract List<ContentUrlEntity> getContentUrlEntities(List<Long> ids);
protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);
/**
@@ -703,6 +836,20 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
*/
protected abstract List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds);
/**
* @param contentData
* the content data
* @return Returns the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentDataEntity getContentDataEntity(ContentData contentData);
/**
* @param contentDataList
* the list of content data
* @return Returns the list of entities or <tt>null</tt> if none exist
*/
protected abstract List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList);
/**
* Update an existing <b>alf_content_data</b> entity
*

View File

@@ -62,6 +62,7 @@ import org.alfresco.util.ParameterCheck;
public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{
private static final String SELECT_CONTENT_URL_BY_ID = "alfresco.content.select_ContentUrlById";
private static final String SELECT_CONTENT_URLS_BY_IDS = "alfresco.content.select_ContentUrlsByIds";
private static final String SELECT_CONTENT_URL_BY_KEY = "alfresco.content.select_ContentUrlByKey";
private static final String SELECT_CONTENT_URL_BY_KEY_UNREFERENCED = "alfresco.content.select_ContentUrlByKeyUnreferenced";
private static final String SELECT_CONTENT_URLS_ORPHANED = "alfresco.content.select.select_ContentUrlsOrphaned";
@@ -69,6 +70,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
private static final String SELECT_CONTENT_DATA_BY_ID = "alfresco.content.select_ContentDataById";
private static final String SELECT_CONTENT_DATA_BY_NODE_AND_QNAME = "alfresco.content.select_ContentDataByNodeAndQName";
private static final String SELECT_CONTENT_DATA_BY_NODE_IDS = "alfresco.content.select_ContentDataByNodeIds";
private static final String SELECT_CONTENT_DATA_BY_CONTENT_DATA = "alfresco.content.select_ContentDataByContentData";
private static final String INSERT_CONTENT_URL = "alfresco.content.insert.insert_ContentUrl";
private static final String INSERT_CONTENT_DATA = "alfresco.content.insert.insert_ContentData";
private static final String UPDATE_CONTENT_URL_ORPHAN_TIME = "alfresco.content.update_ContentUrlOrphanTime";
@@ -132,6 +134,18 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return contentUrlEntity;
}
/**
 * Bulk lookup of <b>alf_content_url</b> entities by ID.
 *
 * @param ids the content URL IDs; may be <tt>null</tt> or empty
 * @return the matching entities, or an empty list if there are none
 */
@Override
protected List<ContentUrlEntity> getContentUrlEntities(List<Long> ids)
{
    // Guard: an empty IN clause is invalid SQL, so short-circuit to an empty result
    if (ids == null || ids.isEmpty())
    {
        return Collections.emptyList();
    }
    return template.selectList(SELECT_CONTENT_URLS_BY_IDS, ids);
}
@Override
public ContentUrlEntity getContentUrlEntity(String contentUrl)
{
@@ -269,6 +283,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return template.selectList(SELECT_CONTENT_DATA_BY_NODE_IDS, idsEntity);
}
/**
 * Looks up a single <b>alf_content_data</b> row matching the given content data.
 *
 * @param contentData the content data to search for; may be <tt>null</tt>
 * @return the entity, or <tt>null</tt> if it doesn't exist
 */
@Override
protected ContentDataEntity getContentDataEntity(ContentData contentData)
{
    if (contentData == null)
    {
        return null;
    }
    // The mapped statement iterates a 'list' collection in its IN clause
    // (select_ContentDataByContentData), so the single value must be wrapped in a
    // list for MyBatis to bind it correctly.
    // NOTE(review): selectOne will fail if the query matches more than one row.
    return template.selectOne(SELECT_CONTENT_DATA_BY_CONTENT_DATA, Collections.singletonList(contentData));
}
/**
 * Bulk lookup of <b>alf_content_data</b> rows matching the given content data values.
 *
 * @param contentDataList the content data values to search for; may be <tt>null</tt> or empty
 * @return the matching entities, or an empty list if there are none
 */
@Override
protected List<ContentDataEntity> getContentDataEntities(List<ContentData> contentDataList)
{
    // Nothing to query for: avoid issuing SQL with an empty IN clause
    if (contentDataList == null || contentDataList.isEmpty())
    {
        return Collections.emptyList();
    }
    return template.selectList(SELECT_CONTENT_DATA_BY_CONTENT_DATA, contentDataList);
}
@Override
protected int updateContentDataEntity(ContentDataEntity entity)
{

View File

@@ -25,6 +25,8 @@
*/
package org.alfresco.repo.domain.encoding;
import java.util.List;
import org.springframework.extensions.surf.util.ParameterCheck;
import org.alfresco.repo.cache.SimpleCache;
@@ -109,6 +111,12 @@ public abstract class AbstractEncodingDAOImpl implements EncodingDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for encodings.");
}
@Override
public Pair<Long, String> findByValue(String encoding)
{

View File

@@ -25,6 +25,7 @@
*/
package org.alfresco.repo.domain.locale;
import java.util.List;
import java.util.Locale;
import org.springframework.dao.DataIntegrityViolationException;
@@ -239,6 +240,12 @@ public abstract class AbstractLocaleDAOImpl implements LocaleDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for locales.");
}
@Override
public Pair<Long, String> findByValue(String localeStr)
{

View File

@@ -43,6 +43,7 @@ import java.util.Stack;
import java.util.TreeSet;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -140,6 +141,9 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
private UsageDAO usageDAO;
private int cachingThreshold = 10;
private int batchSize = 256;
private boolean forceBatching = false;
private boolean preloadContentData = true;
/**
* Cache for the Store root nodes by StoreRef:<br/>
@@ -410,6 +414,36 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
this.childByNameCache = childByNameCache;
}
/**
 * Set the batch size used for bulk node-loading operations.
 *
 * @param batchSize
 *            the number of IDs per database batch (default 256, configured via
 *            <tt>nodes.bulkLoad.batchSize</tt>)
 */
public void setBatchSize(int batchSize)
{
    this.batchSize = batchSize;
}
/**
 * Set whether to force batched loading even for small sets of nodes
 * (default <tt>false</tt>, configured via <tt>nodes.bulkLoad.forceBatching</tt>).
 *
 * @param forceBatching
 *            <tt>true</tt> to always take the batch code path
 */
public void setForceBatching(boolean forceBatching)
{
    this.forceBatching = forceBatching;
}
/**
 * Set whether content data is pre-cached when bulk loading node properties
 * (default <tt>true</tt>, configured via <tt>nodes.bulkLoad.preloadContentData</tt>).
 *
 * @param preloadContentData
 *            <tt>true</tt> to preload content data alongside properties
 */
public void setPreloadContentData(boolean preloadContentData)
{
    this.preloadContentData = preloadContentData;
}
/* Initialize */
public void init()
@@ -831,6 +865,15 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
NodeEntity node = selectStoreRootNode(storeRef);
return node == null ? null : new Pair<StoreRef, Node>(storeRef, node);
}
/**
* @throws UnsupportedOperationException
* Bulk root node lookup not supported
*/
public List<Pair<StoreRef, Node>> findByKeys(List<StoreRef> storeRefs)
{
throw new UnsupportedOperationException("Bulk root node lookup not supported: " + storeRefs);
}
}
/* Nodes */
@@ -873,6 +916,37 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
 * Bulk lookup of nodes by ID for the nodes cache.
 * <p>
 * The IDs are de-duplicated before querying; nodes that do not exist in the
 * database are simply omitted from the result.
 *
 * @param nodeIds
 *            list of node ID keys; may be <tt>null</tt> or empty
 * @return pairs of (node ID, locked Node) for the nodes found
 */
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
    if (nodeIds == null || nodeIds.isEmpty())
    {
        return new ArrayList<>(0);
    }
    List<Pair<Long, Node>> results = new ArrayList<>(nodeIds.size());
    SortedSet<Long> uniqueNodeIds = new TreeSet<>(nodeIds);
    List<Node> nodes = selectNodesByIds(uniqueNodeIds);
    for (Node node : nodes)
    {
        // Shouldn't be null, but...
        if (node != null)
        {
            // Lock it to prevent 'accidental' modification
            node.lock();
            results.add(new Pair<>(node.getId(), node));
        }
    }
    return results;
}
/**
* @return Returns the Node's NodeRef
*/
@@ -1151,6 +1225,71 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
}
/**
 * Get node instances regardless of whether they are considered <b>live</b> or <b>deleted</b>.
 * <p>
 * Bulk counterpart of the single-node lookup: deleted nodes (when {@code liveOnly}
 * is requested) are evicted from the cache and their dangling associations pruned,
 * but -- unlike the single-node path -- no transaction retry is forced; the
 * condition is only logged.
 * <p>
 * NOTE(review): only the all-missing case (empty cache result) is diagnosed; if
 * just <i>some</i> of the requested IDs are absent from the cache result, the
 * shortfall is silently ignored -- confirm callers tolerate partial results.
 *
 * @param nodeIds
 *            the node IDs to look for
 * @param liveOnly
 *            <tt>true</tt> to ensure that only <b>live</b> nodes are retrieved
 * @return nodes that will be <b>live</b> if requested. Nodes not found will be ignored.
 */
private List<Node> getNodesNotNull(List<Long> nodeIds, boolean liveOnly)
{
    List<Pair<Long, Node>> pairs = nodesCache.getByKeys(nodeIds);
    if (pairs.isEmpty())
    {
        // The nodes have no entry in the database
        List<NodeEntity> dbNodes = selectNodesByIds(nodeIds);
        // Evict any stale cache entries for the missing IDs
        nodesCache.removeByKeys(nodeIds);
        // NOTE(review): unconditional debug call (no isDebugEnabled guard); the DB
        // re-select above is done purely for this diagnostic output
        logger.debug(
                "No node rows exists: \n" +
                        "   IDs:        " + nodeIds + "\n" +
                        "   DB rows:    " + dbNodes);
        return Collections.emptyList();
    }
    List<Long> deletedNodeIds = new ArrayList<>();
    List<Node> liveNodes = new ArrayList<>();
    for (Pair<Long, Node> pair : pairs)
    {
        // This might initially seem less performant but after the first iteration the qname will be cached if it is already not there
        if (pair.getSecond().getDeleted(qnameDAO) && liveOnly)
        {
            // Deleted node but caller asked for live ones: remember it for cleanup
            deletedNodeIds.add(pair.getFirst());
        }
        else
        {
            // Keep the live node
            liveNodes.add(pair.getSecond());
        }
    }
    if (!deletedNodeIds.isEmpty())
    {
        // The node is not 'live' as was requested
        List<NodeEntity> dbNode = selectNodesByIds(deletedNodeIds);
        nodesCache.removeByKeys(deletedNodeIds);
        // Now the pain of pruning dangling assocs for each deleted node...this could be slow if there are many deleted nodes
        for (Long nodeId : deletedNodeIds)
        {
            pruneDanglingAssocs(nodeId);
            // In the single node case we would force a retry on the transaction...we can't do that here so just log it
            if (isDebugEnabled)
            {
                logger.debug(
                        "No node rows exists: \n" +
                                "   IDs:        " + nodeId + "\n" +
                                "   DB rows:    " + dbNode);
            }
        }
    }
    return liveNodes;
}
@Override
public QName getNodeType(Long nodeId)
{
@@ -1666,7 +1805,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
public int touchNodes(Long txnId, List<Long> nodeIds)
{
// limit in clause to 1000 node ids
int batchSize = 1000;
var batchSize = 1000;
int touched = 0;
ArrayList<Long> batch = new ArrayList<Long>(batchSize);
@@ -2595,6 +2734,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Map<QName, Serializable>>(nodeVersionKey, Collections.unmodifiableMap(props));
}
/**
* Batch lookup is not supported
*
* @throws UnsupportedOperationException
*/
public List<Pair<NodeVersionKey, Map<QName, Serializable>>> findByKeys(List<NodeVersionKey> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for node properties.");
}
}
/* Aspects */
@@ -2831,6 +2980,24 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(aspects));
}
/**
 * Bulk update of the node aspects cache. Each incoming aspect set is wrapped to be
 * unmodifiable before caching.
 * <p>
 * NOTE(review): node IDs for which {@code getNodesNotNull} returns no node are
 * silently skipped, so their aspect sets are never cached -- confirm this is the
 * intended behaviour.
 *
 * @param nodeAspects map of node ID to the aspects to cache for that node
 */
private void setNodeAspectsCached(Map<Long, Set<QName>> nodeAspects)
{
    List<Long> nodeIds = nodeAspects.keySet().stream().toList();
    // Resolve the current NodeVersionKey for each node (deleted nodes included)
    List<NodeVersionKey> nodeVersionKeys = getNodesNotNull(nodeIds, false).stream()
            .map(Node::getNodeVersionKey)
            .collect(Collectors.toList());
    // Should have minimal impact
    for (NodeVersionKey nodeVersionKey : nodeVersionKeys)
    {
        aspectsCache.setValue(nodeVersionKey, Collections.unmodifiableSet(nodeAspects.get(nodeVersionKey.getNodeId())));
    }
}
/**
* Helper method to copy cache values from one key to another
*/
@@ -2882,6 +3049,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Done
return new Pair<NodeVersionKey, Set<QName>>(nodeVersionKey, Collections.unmodifiableSet(nodeAspectQNames));
}
/**
* Batch lookup is not supported
*
* @throws UnsupportedOperationException
*/
public List<Pair<NodeVersionKey, Set<QName>>> findByKeys(List<NodeVersionKey> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for node aspects.");
}
}
/* Node assocs */
@@ -4558,12 +4735,11 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
int foundCacheEntryCount = 0;
int missingCacheEntryCount = 0;
boolean forceBatch = false;
List<Long> batchLoadNodeIds = new ArrayList<Long>(nodeIds.size());
for (Long nodeId : nodeIds)
{
if (!forceBatch)
if (!forceBatching)
{
// Is this node in the cache?
if (nodesCache.getValue(nodeId) != null)
@@ -4578,7 +4754,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
if (foundCacheEntryCount + missingCacheEntryCount % 100 == 0)
{
// We force the batch if the number of hits drops below the number of misses
forceBatch = foundCacheEntryCount < missingCacheEntryCount;
forceBatching = foundCacheEntryCount < missingCacheEntryCount;
}
}
@@ -4672,7 +4848,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
StoreEntity store = getStoreNotNull(storeRef);
Long storeId = store.getId();
int batchSize = 256;
SortedSet<String> batch = new TreeSet<String>();
for (String uuid : uuids)
{
@@ -4690,12 +4865,12 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByUuids(storeId, batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
private void cacheNodesBatch(List<Long> nodeIds)
{
int batchSize = 256;
SortedSet<Long> batch = new TreeSet<Long>();
for (Long nodeId : nodeIds)
{
@@ -4713,6 +4888,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
List<Node> nodes = selectNodesByIds(batch);
cacheNodesNoBatch(nodes);
logger.info("Batch size may be too small " + batch.size() + " nodes.");
}
}
@@ -4749,22 +4925,40 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
Map<NodeVersionKey, Set<QName>> nodeAspects = selectNodeAspects(aspectNodeIds);
Map<Long, Set<QName>> aspectsMappedByNodeId = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
Map<Long, Set<QName>> nodesWithNoAspects = new HashMap<Long, Set<QName>>(aspectNodeIds.size());
for (Map.Entry<NodeVersionKey, Set<QName>> entry : nodeAspects.entrySet())
{
NodeVersionKey nodeVersionKeyFromDb = entry.getKey();
Long nodeId = nodeVersionKeyFromDb.getNodeId();
Set<QName> qnames = entry.getValue();
setNodeAspectsCached(nodeId, qnames);
aspectNodeIds.remove(nodeId);
NodeVersionKey oldKey = entry.getKey();
Long newKey = oldKey.getNodeId();
Set<QName> value = entry.getValue();
aspectsMappedByNodeId.put(newKey, value);
// Remove the nodeIds from the original Set
aspectNodeIds.remove(newKey);
}
if (!aspectsMappedByNodeId.isEmpty())
{
setNodeAspectsCached(aspectsMappedByNodeId);
}
// Cache the absence of aspects too!
for (Long nodeId : aspectNodeIds)
{
setNodeAspectsCached(nodeId, Collections.<QName> emptySet());
nodesWithNoAspects.put(nodeId, Collections.<QName> emptySet());
}
if (!nodesWithNoAspects.isEmpty())
{
setNodeAspectsCached(nodesWithNoAspects);
}
// First ensure all content data are pre-cached, so we don't have to load them individually when converting properties
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
if (preloadContentData && !propertiesNodeIds.isEmpty())
{
contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);
}
// Now bulk load the properties
Map<NodeVersionKey, Map<NodePropertyKey, NodePropertyValue>> propsByNodeId = selectNodeProperties(propertiesNodeIds);
@@ -4774,7 +4968,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Map<NodePropertyKey, NodePropertyValue> propertyValues = entry.getValue();
Map<QName, Serializable> props = nodePropertyHelper.convertToPublicProperties(propertyValues);
setNodePropertiesCached(nodeId, props);
}
} // TODO(review): rework this loop -- bulk post-processing of the converted properties would be cleaner than per-node cache writes
}
/**
@@ -4943,6 +5137,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
protected abstract NodeEntity selectNodeById(Long id);
protected abstract List<NodeEntity> selectNodesByIds(List<Long> ids);
protected abstract NodeEntity selectNodeByNodeRef(NodeRef nodeRef);
protected abstract List<Node> selectNodesByUuids(Long storeId, SortedSet<String> uuids);

View File

@@ -419,6 +419,19 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
return template.selectOne(SELECT_NODE_BY_ID, node);
}
/**
 * Bulk node lookup by ID.
 * <p>
 * The mapped statement binds against entity objects, so each ID is wrapped in a
 * stub {@link NodeEntity} before the list is handed to MyBatis.
 *
 * @param ids the node IDs to select (must not be <tt>null</tt>)
 * @return the matching node entities, or an empty list for empty input
 */
@Override
protected List<NodeEntity> selectNodesByIds(List<Long> ids)
{
    List<NodeEntity> nodes = new ArrayList<>(ids.size());
    if (ids.isEmpty())
    {
        // Avoid issuing SQL with an empty IN clause
        return nodes;
    }
    for (Long id : ids)
    {
        NodeEntity node = new NodeEntity();
        node.setId(id);
        nodes.add(node);
    }
    return template.selectList(SELECT_NODES_BY_IDS, nodes);
}
@Override
protected NodeEntity selectNodeByNodeRef(NodeRef nodeRef)
{

View File

@@ -310,6 +310,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return null;
}
@Override
public List<Serializable> getValueKeys(List<AclEntity> values)
{
throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> createValue(AclEntity value)
{
AclEntity entity = createAclEntity(value);
@@ -322,6 +328,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, AclEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for ACLs.");
}
public Pair<Long, AclEntity> findByValue(AclEntity value)
{
if ((value != null) && (value.getId() != null))
@@ -816,6 +828,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value;
}
@Override
public List<PermissionEntity> getValueKeys(List<PermissionEntity> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> createValue(PermissionEntity value)
{
PermissionEntity entity = createPermissionEntity(value);
@@ -828,6 +846,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, PermissionEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for permissions.");
}
public Pair<Long, PermissionEntity> findByValue(PermissionEntity value)
{
if ((value == null) || (value.getName() == null) || (value.getTypeQNameId() == null))
@@ -999,6 +1023,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return value.getAuthority();
}
@Override
public List<String> getValueKeys(List<AuthorityEntity> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> createValue(AuthorityEntity value)
{
AuthorityEntity entity = createAuthorityEntity(value);
@@ -1011,6 +1041,12 @@ public abstract class AbstractAclCrudDAOImpl implements AclCrudDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, AuthorityEntity>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for authorities.");
}
public Pair<Long, AuthorityEntity> findByValue(AuthorityEntity value)
{
if ((value == null) || (value.getAuthority() == null))

View File

@@ -373,6 +373,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Class<?>>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property classes.");
}
public Pair<Long, Class<?>> findByValue(Class<?> value)
{
PropertyClassEntity entity = findClassByValue(value);
@@ -465,6 +471,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Date>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property date values.");
}
public Pair<Long, Date> findByValue(Date value)
{
PropertyDateValueEntity entity = findDateValueByValue(value);
@@ -566,6 +578,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property string values.");
}
public Pair<Long, String> findByValue(String value)
{
Long key = findStringValueByValue(value);
@@ -658,6 +676,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Double>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property double values.");
}
public Pair<Long, Double> findByValue(Double value)
{
PropertyDoubleValueEntity entity = findDoubleValueByValue(value);
@@ -727,6 +751,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
PropertySerializableValueEntity entity = findSerializableValueById(key);
return convertEntityToPair(entity);
}
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property serializable values.");
}
}
protected abstract PropertySerializableValueEntity findSerializableValueById(Long id);
@@ -833,6 +863,11 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return convertEntityToPair(entity);
}
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for property values.");
}
public Pair<Long, Serializable> findByValue(Serializable value)
{
PropertyValueEntity entity = findPropertyValueByValue(value);
@@ -937,6 +972,12 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO
return new Pair<Long, Serializable>(key, value);
}
@Override
public List<Pair<Long, Serializable>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for properties.");
}
/**
* Updates a property. The <b>alf_prop_root</b> entity is updated to ensure concurrent modification is detected.
*

View File

@@ -27,6 +27,7 @@ package org.alfresco.repo.domain.qname;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -191,6 +192,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
@Override
public List<Pair<Long, String>> findByKeys(List<Long> ids)
{
throw new UnsupportedOperationException("Batch lookup not supported for namespaces.");
}
@Override
public Pair<Long, String> findByValue(String uri)
{
@@ -351,6 +358,12 @@ public abstract class AbstractQNameDAOImpl implements QNameDAO
}
}
@Override
public List<Pair<Long, QName>> findByKeys(List<Long> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for QNames.");
}
@Override
public Pair<Long, QName> findByValue(QName qname)
{

View File

@@ -210,6 +210,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return null;
}
@Override
public List<Serializable> getValueKeys(List<TenantEntity> values)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> createValue(TenantEntity value)
{
@@ -224,6 +230,12 @@ public abstract class AbstractTenantAdminDAOImpl implements TenantAdminDAO
return convertEntityToPair(entity);
}
@Override
public List<Pair<String, TenantEntity>> findByKeys(List<String> keys)
{
throw new UnsupportedOperationException("Batch lookup not supported for tenants.");
}
@Override
public Pair<String, TenantEntity> findByValue(TenantEntity value)
{

View File

@@ -565,6 +565,12 @@ public class DBQueryEngine implements QueryEngine
return null;
}
@Override
public List<Pair<Long, Node>> findByKeys(List<Long> nodeIds)
{
throw new UnsupportedOperationException("Batch lookup not supported for Nodes.");
}
@Override
public NodeRef getValueKey(Node value)
{

View File

@@ -140,6 +140,9 @@
<property name="parentAssocsCacheLimitFactor" value="${system.cache.parentAssocs.limitFactor}"/>
<property name="childByNameCache" ref="node.childByNameCache"/>
<property name="cachingThreshold" value="${nodes.bulkLoad.cachingThreshold}"/>
<property name="batchSize" value="${nodes.bulkLoad.batchSize:256}"/>
<property name="forceBatching" value="${nodes.bulkLoad.forceBatching:false}"/>
<property name="preloadContentData" value="${nodes.bulkLoad.preloadContentData:true}"/>
</bean>
<bean id="nodeDAO.org.alfresco.repo.domain.dialect.Dialect" class="org.alfresco.repo.domain.node.ibatis.NodeDAOImpl" parent="nodeDAObase" />

View File

@@ -235,6 +235,31 @@
u.id = #{id}
</select>
<!-- Get the content URL entities by IDs -->
<select id="select_ContentUrlsByIds" parameterType="list" resultMap="result_ContentUrl">
select
u.id as id,
u.content_url as content_url,
u.content_url_short as content_url_short,
u.content_url_crc as content_url_crc,
u.content_size as content_size,
u.orphan_time as orphan_time,
ce.algorithm as algorithm,
ce.key_size as key_size,
ce.encrypted_key as encrypted_key,
ce.master_keystore_id as master_keystore_id,
ce.master_key_alias as master_key_alias,
ce.unencrypted_file_size as unencrypted_file_size
from
alf_content_url u
left join alf_content_url_encryption ce on (u.id = ce.content_url_id)
where
u.id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</select>
<!-- Get the content URL entity by unique key -->
<select id="select_ContentUrlByKey" parameterType="ContentUrl" resultMap="result_ContentUrl">
select
@@ -373,6 +398,28 @@
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get ContentData entities by Content Data -->
<select id="select_ContentDataByContentData" parameterType="ContentData" resultMap="result_ContentData">
select
cd.id as id,
cd.version as version,
cd.content_url_id as content_url_id,
cu.content_size as content_size,
cd.content_mimetype_id as content_mimetype_id,
cd.content_encoding_id as content_encoding_id,
cd.content_locale_id as content_locale_id
from
alf_content_data cd
join alf_node_properties np on (cd.id = np.long_value)
left join alf_content_url cu on (cd.content_url_id = cu.id)
where
cu.content_url in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item.contentUrl}
</foreach>
and (np.actual_type_n = 3 or np.actual_type_n = 21)
</select>
<!-- Get the ContentData entity by Node and property QName -->
<select id="select_ContentDataByNodeAndQName" parameterType="Ids" resultType="long">
select

View File

@@ -762,13 +762,15 @@
alf_node node
join alf_node_aspects aspects on (aspects.node_id = node.id)
<where>
<if test="nodeId != null">aspects.node_id = #{nodeId}</if>
<if test="nodeIds != null">
and aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</if>
<choose>
<when test="nodeId != null">aspects.node_id = #{nodeId}</when>
<when test="nodeIds != null">
aspects.node_id in
<foreach item="item" index="index" collection="nodeIds" open="(" separator="," close=")">
#{item}
</foreach>
</when>
</choose>
</where>
</select>

View File

@@ -918,6 +918,9 @@ mail.service.corePoolSize=8
mail.service.maximumPoolSize=20
nodes.bulkLoad.cachingThreshold=10
nodes.bulkLoad.batchSize=256
nodes.bulkLoad.forceBatching=false
nodes.bulkLoad.preloadContentData=true
# Multi-Tenancy

View File

@@ -28,6 +28,8 @@ package org.alfresco.repo.cache.lookup;
import static org.junit.Assert.*;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -332,6 +334,16 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return dbValue;
}
/**
 * Test implementation: resolves each value's key via the single-value callback.
 */
public List<String> getValueKeys(List<Object> values)
{
    List<String> keys = new ArrayList<>(values.size());
    values.forEach(value -> keys.add(getValueKey(value)));
    return keys;
}
public Pair<Long, Object> findByKey(Long key)
{
assertNotNull(key);
@@ -346,6 +358,12 @@ public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Obje
return new Pair<Long, Object>(key, value);
}
@Override
public List<Pair<Long, Object>> findByKeys(List<Long> key)
{
throw new UnsupportedOperationException("Batch lookup not supported in test DAO.");
}
public Pair<Long, Object> findByValue(Object value)
{
assertTrue(value == null || value instanceof TestValue);