Mirror of https://github.com/Alfresco/alfresco-community-repo.git, synced 2025-09-24 14:32:01 +00:00
Compare commits
143 Commits
SHA1:
187646895c, f9515e336f, 828dd20576, 3372e20c35, 64b5cace27, 83acf26cf4, b3be0f2b7f, 7a6ebb9a05,
fa0f239618, 43799408a8, e7305006f0, 40c30411af, 91f8b43237, 6fccf828e1, 3fac3373c9, ee857ce1de,
483d7fab21, 590209b299, 376514df67, 7144a2dd94, b4da3d8c20, 62de9ff0c0, a11acce720, 1128011e15,
d0cb45de0d, 2b48195896, fbb95d6a7f, 502427e852, 3ff2d79641, f274b88ece, 21550ec30b, 8665267225,
984b0bc719, 5b89fc0be7, bf3a3382fd, 14d007fae8, 79317ddc9d, c0e762fe5e, 5109b99520, dfc6306331,
731f98921f, 0b21dbdc0a, dd928356b8, 1844d8bdb9, 17eef66f5c, 1d1f269a70, 2ccf6044b8, bdd09784e1,
de5d70be46, 8cacba0988, 60187bf9a2, ff4634be19, 9c64b45908, d97d8fba04, 368b571d9c, e9da7d222b,
8c059460f9, bd0aaa08b3, 93d678dc30, 9648189827, 9bfd274127, dcf9f65f6b, 784fae5834, eddd8a1065,
0941746518, a1f0f35f60, 25d96a50cd, 3c7f024fed, 6a1a197701, 5cff5092a0, b50f912df2, b4f00dddb8,
02c103f39a, 3c23fa20c5, 576b6faac9, d86415401d, 460cc1f2cd, 370fef10fd, efadc239d4, de90e37578,
6e438d2e4f, a86fa21880, db74a6e7f2, 8c773ac97c, 5a2b3cf64d, 8ab910d2b1, fa70f1cd45, 75a2e0f901,
84997bcf86, 536b3ddd6d, bb86c97b11, 558f117f24, bbae71658d, 9ee9653463, 44570cec8a, 33eb0354fa,
0d3e2dc8bb, ac62c52a33, 0d30d40d8f, 87e365df7e, 6ee01e808b, 57427f4765, f6a12760c9, 560a050af3,
5ed82930d2, f8a32022c3, 137df0ff4c, 23bd8c064c, e3384eaee4, c7e5716a4a, 7fa0d30100, 6d33e57d05,
f0bfc647e4, 757dbbbb1f, 8c555b62d4, d0138c9702, 71849cd4ac, dba27bb86c, 6a37bc93c0, 686ffcb19c,
b36a1a9364, a12a31120d, bf2e53344d, aa4570c895, 6e2e3f705f, 462625165f, d341a3bab8, ecf658dd82,
feba13d274, b2023bed63, 2c1203f1f5, 0edebd7df1, e059aa060a, bd92569cec, 2b521fd5a7, 1ecfa6c18e,
60eb7d2630, 65a70c64c5, d23279227a, 117462a480, be47b189dc, 7ccc2f02f5, 456fa58c11
58 .github/workflows/ci.yml vendored
@@ -47,6 +47,10 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- id: changed-files
uses: Alfresco/alfresco-build-tools/.github/actions/github-list-changes@v8.16.0
with:
write-list-to-env: true
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
@@ -102,16 +106,12 @@ jobs:
run: |
bash ./scripts/ci/init.sh
bash ./scripts/ci/build.sh
- name: "Remove excluded files"
run: |
mkdir temp-dir-for-sast
bash ./scripts/ci/remove-sast-exclusions.sh ./packaging/war/target/alfresco.war temp-dir-for-sast/reduced.war
- name: "Run SAST Scan"
uses: veracode/Veracode-pipeline-scan-action@v1.0.16
with:
vid: ${{ secrets.VERACODE_API_ID }}
vkey: ${{ secrets.VERACODE_API_KEY }}
file: "temp-dir-for-sast/reduced.war"
file: "packaging/war/target/alfresco.war"
fail_build: true
project_name: alfresco-community-repo
issue_details: true
@@ -129,8 +129,6 @@ jobs:
with:
name: Veracode Pipeline-Scan Results (Human Readable)
path: readable_output.zip
- name: "Remove temporary directory"
run: rm -rfv temp-dir-for-sast
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
@@ -333,7 +331,7 @@ jobs:
strategy:
fail-fast: false
matrix:
version: ['10.5', '10.6']
version: ['10.2.18', '10.4', '10.5']
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
@@ -394,8 +392,8 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh

repository_mariadb_10_11_tests:
name: "Repository - MariaDB 10.11 tests"
repository_mariadb_10_6_tests:
name: "Repository - MariaDB 10.6 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -412,10 +410,10 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.11 database"
- name: "Run MariaDB 10.6 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile mariadb up -d
env:
MARIADB_VERSION: 10.11
MARIADB_VERSION: 10.6
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.16.0
@@ -511,7 +509,7 @@ jobs:
RP_OPTS: ${{ github.ref_name == 'master' && steps.rp-prepare.outputs.mvn-opts || '' }}
run: |
eval "args=($RP_OPTS)"
mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.cj.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "${args[@]}"
mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "${args[@]}"
continue-on-error: true
- name: "Update GitHub Step Summary"
if: github.ref_name == 'master'
@@ -536,8 +534,8 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh

repository_postgresql_14_15_tests:
name: "Repository - PostgreSQL 14.15 tests"
repository_postgresql_13_12_tests:
name: "Repository - PostgreSQL 13.12 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -553,10 +551,10 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.15 database"
- name: "Run PostgreSQL 13.12 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 14.15
POSTGRES_VERSION: 13.12
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.16.0
@@ -606,16 +604,16 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh

repository_postgresql_15_10_tests:
name: "Repository - PostgreSQL 15.10 tests"
repository_postgresql_14_9_tests:
name: "Repository - PostgreSQL 14.9 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
(((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) && github.event_name != 'pull_request' &&
!contains(github.event.head_commit.message, '[skip db]')) ||
contains(github.event.head_commit.message, '[db]')) &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
!contains(github.event.head_commit.message, '[skip db]')) ||
contains(github.event.head_commit.message, '[db]')) &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
@@ -623,10 +621,10 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 15.10 database"
- name: "Run PostgreSQL 14.9 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 15.10
POSTGRES_VERSION: 14.9
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.16.0
@@ -676,8 +674,8 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh

repository_postgresql_16_6_tests:
name: "Repository - PostgreSQL 16.6 tests"
repository_postgresql_15_4_tests:
name: "Repository - PostgreSQL 15.4 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -693,10 +691,10 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 16.6 database"
- name: "Run PostgreSQL 15.4 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 16.6
POSTGRES_VERSION: 15.4
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.16.0
@@ -1057,7 +1055,7 @@ jobs:
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 16.6 database"
- name: "Run Postgres 15.4 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile postgres up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
16 .github/workflows/master_release.yml vendored
@@ -34,12 +34,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.16.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -63,12 +63,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.16.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.16.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -133,21 +133,21 @@
"filename": ".github/workflows/ci.yml",
"hashed_secret": "b86dc2f033a63f2b7b9e7d270ab806d2910d7572",
"is_verified": false,
"line_number": 295
"line_number": 293
},
{
"type": "Secret Keyword",
"filename": ".github/workflows/ci.yml",
"hashed_secret": "1bfb0e20f886150ba59b853bcd49dea893e00966",
"is_verified": false,
"line_number": 370
"line_number": 368
},
{
"type": "Secret Keyword",
"filename": ".github/workflows/ci.yml",
"hashed_secret": "128f14373ccfaff49e3664045d3a11b50cbb7b39",
"is_verified": false,
"line_number": 904
"line_number": 902
}
],
".github/workflows/master_release.yml": [
@@ -1273,7 +1273,7 @@
"filename": "repository/src/main/resources/alfresco/repository.properties",
"hashed_secret": "84551ae5442affc9f1a2d3b4c86ae8b24860149d",
"is_verified": false,
"line_number": 770,
"line_number": 771,
"is_secret": false
}
],
@@ -1377,7 +1377,7 @@
"filename": "repository/src/test/java/org/alfresco/repo/imap/ImapMessageTest.java",
"hashed_secret": "d033e22ae348aeb5660fc2140aec35850c4da997",
"is_verified": false,
"line_number": 116,
"line_number": 118,
"is_secret": false
}
],
@@ -1431,6 +1431,26 @@
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 112,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 103,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/management/JmxDumpUtilTest.java": [
{
"type": "Secret Keyword",
@@ -1519,7 +1539,7 @@
"filename": "repository/src/test/java/org/alfresco/repo/rendition2/AbstractRenditionIntegrationTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 127,
"line_number": 130,
"is_secret": false
}
],
@@ -1868,5 +1888,5 @@
}
]
},
"generated_at": "2025-05-13T13:17:41Z"
"generated_at": "2025-05-15T21:47:13Z"
}
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.1.2.1</version>
<version>23.6.0.21</version>
</parent>

<modules>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.1.2.1</version>
<version>23.6.0.21</version>
</parent>

<modules>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>25.1.2.1</version>
<version>23.6.0.21</version>
</parent>

<build>
@@ -200,3 +200,4 @@ public class RuleDefinition
return this;
}
}
@@ -71,3 +71,4 @@ public class FilesAPI extends RMModelRequest<FilesAPI>
));
}
}
@@ -45,7 +45,7 @@ import com.github.dockerjava.netty.NettyDockerCmdExecFactory;
import lombok.Getter;
import lombok.Setter;
import org.alfresco.utility.Utility;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang3.SystemUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -117,3 +117,4 @@ public class RecordCategoryAuditLogTest extends BaseRMRestTest {
auditLog.clearAuditLog(getAdminUser().getUsername(), getAdminUser().getPassword());
}
}
@@ -26,13 +26,6 @@
*/
package org.alfresco.rest.rm.community.hold;

import static org.alfresco.rest.rm.community.base.TestData.HOLD_DESCRIPTION;
import static org.alfresco.rest.rm.community.base.TestData.HOLD_REASON;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.FILE_PLAN_ALIAS;
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_FILING;
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_READ_RECORDS;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -44,12 +37,25 @@ import static org.springframework.http.HttpStatus.NOT_FOUND;
import static org.springframework.http.HttpStatus.OK;
import static org.springframework.http.HttpStatus.UNAUTHORIZED;

import static org.alfresco.rest.rm.community.base.TestData.HOLD_DESCRIPTION;
import static org.alfresco.rest.rm.community.base.TestData.HOLD_REASON;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.FILE_PLAN_ALIAS;
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_FILING;
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_READ_RECORDS;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.utility.report.log.Step.STEP;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import org.alfresco.dataprep.CMISUtil;
import org.alfresco.dataprep.ContentActions;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
@@ -71,10 +77,6 @@ import org.alfresco.utility.constants.UserRole;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FolderModel;
import org.alfresco.utility.model.UserModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
* API tests for adding items to holds via the bulk process
@@ -82,7 +84,7 @@ import org.testng.annotations.Test;
public class AddToHoldsBulkV1Tests extends BaseRMRestTest
{
private static final String ACCESS_DENIED_ERROR_MESSAGE = "Access Denied. You do not have the appropriate " +
"permissions to perform this operation.";
"permissions to perform this operation.";
private static final int NUMBER_OF_FILES = 5;
private final List<FileModel> addedFiles = new ArrayList<>();
private final List<UserModel> users = new ArrayList<>();
@@ -102,8 +104,9 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
{
STEP("Create a hold.");
hold = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(),
FILE_PLAN_ALIAS);
holds.add(hold);

STEP("Create test files.");
@@ -117,8 +120,8 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
for (int i = 0; i < NUMBER_OF_FILES; i++)
{
FileModel documentHeld = dataContent.usingAdmin()
.usingResource(i % 2 == 0 ? folder1 : folder2)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
.usingResource(i % 2 == 0 ? folder1 : folder2)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
addedFiles.add(documentHeld);
}

@@ -128,29 +131,37 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest

STEP("Wait until all files are searchable.");
await().atMost(30, TimeUnit.SECONDS)
.until(() -> getRestAPIFactory().getSearchAPI(null).search(searchRequest).getPagination()
.getTotalItems() == NUMBER_OF_FILES);
.until(() -> getRestAPIFactory().getSearchAPI(null).search(searchRequest).getPagination()
.getTotalItems() == NUMBER_OF_FILES);

RestRequestQueryModel ancestorReq = getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion());
SearchRequest ancestorSearchRequest = new SearchRequest();
ancestorSearchRequest.setQuery(ancestorReq);

STEP("Wait until paths are indexed.");
// to improve stability on CI - seems that sometimes during big load we need to wait longer for the condition
await().atMost(120, TimeUnit.SECONDS)
.until(() -> getRestAPIFactory().getSearchAPI(null).search(ancestorSearchRequest).getPagination()
.getTotalItems() == NUMBER_OF_FILES);

holdBulkOperation = HoldBulkOperation.builder()
.query(queryReq)
.op(HoldBulkOperationType.ADD).build();
.query(queryReq)
.op(HoldBulkOperationType.ADD).build();
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* Then the content is added to the hold and the status of the bulk operation is DONE
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API Then the content is added to the hold and the status of the bulk operation is DONE
*/
@Test
public void addContentFromTestSiteToHoldUsingBulkAPI()
{
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userAddHoldPermission);

STEP("Add content from the site to the hold using the bulk API.");
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.startBulkProcess(holdBulkOperation, hold.getId());
.startBulkProcess(holdBulkOperation, hold.getId());

// Verify the status code
assertStatusCode(ACCEPTED);
@@ -158,50 +169,49 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest

STEP("Wait until all files are added to the hold.");
await().atMost(20, TimeUnit.SECONDS).until(
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold.getId()).getEntries().size()
== NUMBER_OF_FILES);
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold.getId()).getEntries().size() == NUMBER_OF_FILES);
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getChildren(hold.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId).toList();
.getChildren(hold.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId)
.toList();
assertEquals(addedFiles.stream().map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
holdChildrenNodeRefs.stream().sorted().toList());
holdChildrenNodeRefs.stream().sorted().toList());

STEP("Check the bulk status.");
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 0, null, holdBulkOperation);

STEP("Check the bulk statuses.");
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatuses(hold.getId());
.getBulkStatuses(hold.getId());
assertEquals(Arrays.asList(holdBulkStatus),
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a folder and all subfolders to a hold using the bulk API
* Then the content is added to the hold and the status of the bulk operation is DONE
* Given a user with the add to hold capability and hold filing permission When the user adds content from a folder and all subfolders to a hold using the bulk API Then the content is added to the hold and the status of the bulk operation is DONE
*/
@Test
public void addContentFromFolderAndAllSubfoldersToHoldUsingBulkAPI()
{
hold3 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(),
FILE_PLAN_ALIAS);
holds.add(hold3);

UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold3.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
UserRole.SiteCollaborator, hold3.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userAddHoldPermission);

STEP("Add content from the site to the hold using the bulk API.");
// Get content from folder and all subfolders of the root folder
HoldBulkOperation bulkOperation = HoldBulkOperation.builder()
.query(getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion()))
.op(HoldBulkOperationType.ADD).build();
.query(getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion()))
.op(HoldBulkOperationType.ADD).build();
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.startBulkProcess(bulkOperation, hold3.getId());
.startBulkProcess(bulkOperation, hold3.getId());

// Verify the status code
assertStatusCode(ACCEPTED);
@@ -209,43 +219,40 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest

STEP("Wait until all files are added to the hold.");
await().atMost(20, TimeUnit.SECONDS).until(
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold3.getId()).getEntries().size()
== NUMBER_OF_FILES);
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold3.getId()).getEntries().size() == NUMBER_OF_FILES);
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getChildren(hold3.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId).toList();
.getChildren(hold3.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId)
.toList();
assertEquals(addedFiles.stream().map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
holdChildrenNodeRefs.stream().sorted().toList());
holdChildrenNodeRefs.stream().sorted().toList());

STEP("Check the bulk status.");
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatus(hold3.getId(), bulkOperationEntry.getBulkStatusId());
.getBulkStatus(hold3.getId(), bulkOperationEntry.getBulkStatusId());
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 0, null, bulkOperation);

STEP("Check the bulk statuses.");
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatuses(hold3.getId());
.getBulkStatuses(hold3.getId());
assertEquals(List.of(holdBulkStatus),
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
}

/**
* Given a user without the add to hold capability
* When the user adds content from a site to a hold using the bulk API
* Then the user receives access denied error
* Given a user without the add to hold capability When the user adds content from a site to a hold using the bulk API Then the user receives access denied error
*/
@Test
public void testBulkProcessWithUserWithoutAddToHoldCapability()
{
UserModel userWithoutAddToHoldCapability = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole
.SiteCollaborator,
hold.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
UserRole.SiteCollaborator,
hold.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
users.add(userWithoutAddToHoldCapability);

STEP("Add content from the site to the hold using the bulk API.");
getRestAPIFactory().getHoldsAPI(userWithoutAddToHoldCapability)
.startBulkProcess(holdBulkOperation, hold.getId());
.startBulkProcess(holdBulkOperation, hold.getId());

STEP("Verify the response status code and the error message.");
assertStatusCode(FORBIDDEN);
@@ -253,21 +260,19 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user without the filing permission on a hold
* When the user adds content from a site to a hold using the bulk API
* Then the user receives access denied error
* Given a user without the filing permission on a hold When the user adds content from a site to a hold using the bulk API Then the user receives access denied error
*/
@Test
public void testBulkProcessWithUserWithoutFilingPermissionOnAHold()
{
// User without filing permission on a hold
UserModel userWithoutPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_READ_RECORDS);
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_READ_RECORDS);
users.add(userWithoutPermission);

STEP("Add content from the site to the hold using the bulk API.");
getRestAPIFactory().getHoldsAPI(userWithoutPermission)
.startBulkProcess(holdBulkOperation, hold.getId());
.startBulkProcess(holdBulkOperation, hold.getId());

STEP("Verify the response status code and the error message.");
assertStatusCode(FORBIDDEN);
@@ -276,68 +281,63 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user without the write permission on all the content
* When the user adds content from a site to a hold using the bulk API
* Then all processed items are marked as errors and the last error message contains access denied error
* Given a user without the write permission on all the content When the user adds content from a site to a hold using the bulk API Then all processed items are marked as errors and the last error message contains access denied error
*/
@Test
public void testBulkProcessWithUserWithoutWritePermissionOnTheContent()
{
// User without write permission on the content
UserModel userWithoutPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(
testSite, UserRole.SiteConsumer,
hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
testSite, UserRole.SiteConsumer,
hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userWithoutPermission);

// Wait until permissions are reverted
SearchRequest searchRequest = new SearchRequest();
searchRequest.setQuery(holdBulkOperation.getQuery());
await().atMost(30, TimeUnit.SECONDS)
.until(() -> getRestAPIFactory().getSearchAPI(userWithoutPermission).search(searchRequest).getPagination()
.getTotalItems() == NUMBER_OF_FILES);
.until(() -> getRestAPIFactory().getSearchAPI(userWithoutPermission).search(searchRequest).getPagination()
.getTotalItems() == NUMBER_OF_FILES);

STEP("Add content from the site to the hold using the bulk API.");
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(
userWithoutPermission).startBulkProcess(holdBulkOperation, hold.getId());
userWithoutPermission).startBulkProcess(holdBulkOperation, hold.getId());

STEP("Verify the response.");
assertStatusCode(ACCEPTED);

await().atMost(20, TimeUnit.SECONDS).until(() ->
Objects.equals(getRestAPIFactory().getHoldsAPI(userWithoutPermission)
await().atMost(20, TimeUnit.SECONDS).until(() -> Objects.equals(getRestAPIFactory().getHoldsAPI(userWithoutPermission)
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId()).getStatus(), "DONE"));

HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userWithoutPermission)
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, NUMBER_OF_FILES, ACCESS_DENIED_ERROR_MESSAGE,
holdBulkOperation);
holdBulkOperation);
}

/**
* Given a user without the write permission on one file
* When the user adds content from a site to a hold using the bulk API
* Then all processed items are added to the hold except the one that the user does not have write permission
* And the status of the bulk operation is DONE, contains the error message and the number of errors is 1
* Given a user without the write permission on one file When the user adds content from a site to a hold using the bulk API Then all processed items are added to the hold except the one that the user does not have write permission And the status of the bulk operation is DONE, contains the error message and the number of errors is 1
*/
@Test
public void testBulkProcessWithUserWithoutWritePermissionOnOneFile()
{
hold2 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(),
FILE_PLAN_ALIAS);
holds.add(hold2);

UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold2.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
UserRole.SiteCollaborator, hold2.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userAddHoldPermission);

contentActions.setPermissionForUser(getAdminUser().getUsername(), getAdminUser().getPassword(),
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
UserRole.SiteConsumer.getRoleId(), false);
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
UserRole.SiteConsumer.getRoleId(), false);

STEP("Add content from the site to the hold using the bulk API.");
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.startBulkProcess(holdBulkOperation, hold2.getId());
.startBulkProcess(holdBulkOperation, hold2.getId());

// Verify the status code
assertStatusCode(ACCEPTED);
@@ -345,56 +345,50 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest

STEP("Wait until all files are added to the hold.");
await().atMost(30, TimeUnit.SECONDS).until(
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold2.getId()).getEntries().size()
== NUMBER_OF_FILES - 1);
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold2.getId()).getEntries().size() == NUMBER_OF_FILES - 1);
await().atMost(30, TimeUnit.SECONDS).until(
() -> getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId()).getProcessedItems()
== NUMBER_OF_FILES);
() -> getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId()).getProcessedItems() == NUMBER_OF_FILES);
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getChildren(hold2.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId).toList();
.getChildren(hold2.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
HoldChild::getId)
.toList();
assertEquals(addedFiles.stream().skip(1).map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
holdChildrenNodeRefs.stream().sorted().toList());
holdChildrenNodeRefs.stream().sorted().toList());

STEP("Check the bulk status.");
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId());
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId());
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 1, ACCESS_DENIED_ERROR_MESSAGE, holdBulkOperation);

STEP("Check the bulk statuses.");
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.getBulkStatuses(hold2.getId());
.getBulkStatuses(hold2.getId());
assertEquals(List.of(holdBulkStatus),
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());

// Revert the permissions
contentActions.setPermissionForUser(getAdminUser().getUsername(), getAdminUser().getPassword(),
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
UserRole.SiteCollaborator.getRoleId(), true);
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
UserRole.SiteCollaborator.getRoleId(), true);
}

/**
* Given an unauthenticated user
* When the user adds content from a site to a hold using the bulk API
* Then the user receives unauthorized error
* Given an unauthenticated user When the user adds content from a site to a hold using the bulk API Then the user receives unauthorized error
*/
@Test
public void testBulkProcessAsUnauthenticatedUser()
{
STEP("Start bulk process as unauthenticated user");
getRestAPIFactory().getHoldsAPI(new UserModel(getAdminUser().getUsername(), "wrongPassword"))
.startBulkProcess(holdBulkOperation, hold.getId());
.startBulkProcess(holdBulkOperation, hold.getId());

STEP("Verify the response status code.");
assertStatusCode(UNAUTHORIZED);
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And the hold does not exist
* Then the user receives not found error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
*/
@Test
public void testBulkProcessForNonExistentHold()
@@ -407,10 +401,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* and the bulk operation is invalid
* Then the user receives bad request error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API and the bulk operation is invalid Then the user receives bad request error
*/
@Test
public void testGetBulkStatusesForInvalidOperation()
@@ -418,7 +409,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
STEP("Start bulk process for non existent hold");

HoldBulkOperation invalidHoldBulkOperation = HoldBulkOperation.builder().op(null)
.query(holdBulkOperation.getQuery()).build();
.query(holdBulkOperation.getQuery()).build();
getRestAPIFactory().getHoldsAPI(getAdminUser()).startBulkProcess(invalidHoldBulkOperation, hold.getId());

STEP("Verify the response status code.");
@@ -426,10 +417,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And the hold does not exist
* Then the user receives not found error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
*/
@Test
public void testGetBulkStatusForNonExistentHold()
@@ -442,10 +430,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And the bulk status does not exist
* Then the user receives not found error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the bulk status does not exist Then the user receives not found error
*/
@Test
public void testGetBulkStatusForNonExistentBulkStatus()
@@ -458,10 +443,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And the hold does not exist
* Then the user receives not found error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
*/
@Test
public void testGetBulkStatusesForNonExistentHold()
@@ -474,9 +456,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from all sites to a hold using the bulk API to exceed the limit (30 items)
* Then the user receives bad request error
* Given a user with the add to hold capability and hold filing permission When the user adds content from all sites to a hold using the bulk API to exceed the limit (30 items) Then the user receives bad request error
*/
@Test
public void testExceedingBulkOperationLimit()
@@ -486,8 +466,8 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
queryReq.setLanguage("afts");

HoldBulkOperation exceedLimitOp = HoldBulkOperation.builder()
.query(queryReq)
.op(HoldBulkOperationType.ADD).build();
.query(queryReq)
.op(HoldBulkOperationType.ADD).build();

STEP("Start bulk process to exceed the limit");
getRestAPIFactory().getHoldsAPI(getAdminUser()).startBulkProcess(exceedLimitOp, hold.getId());
@@ -497,26 +477,24 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And then the user cancels the bulk operation
* Then the user receives OK status code
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And then the user cancels the bulk operation Then the user receives OK status code
*/
@Test
public void testBulkProcessCancellationWithAllowedUser()
{
Hold hold4 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(),
FILE_PLAN_ALIAS);
holds.add(hold4);

UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold4.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
UserRole.SiteCollaborator, hold4.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userAddHoldPermission);

STEP("Add content from the site to the hold using the bulk API.");
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.startBulkProcess(holdBulkOperation, hold4.getId());
.startBulkProcess(holdBulkOperation, hold4.getId());

// Verify the status code
assertStatusCode(ACCEPTED);
@@ -524,47 +502,44 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest

STEP("Cancel the bulk operation.");
getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.cancelBulkOperation(hold4.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
.cancelBulkOperation(hold4.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());

// Verify the status code
assertStatusCode(OK);
}

/**
* Given a user with the add to hold capability and hold filing permission
* When the user adds content from a site to a hold using the bulk API
* And a 2nd user without the add to hold capability cancels the bulk operation
* Then the 2nd user receives access denied error
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And a 2nd user without the add to hold capability cancels the bulk operation Then the 2nd user receives access denied error
*/
@Test
public void testBulkProcessCancellationWithUserWithoutAddToHoldCapability()
{
Hold hold5 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
.reason(HOLD_REASON).build(),
FILE_PLAN_ALIAS);
holds.add(hold5);

UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole.SiteCollaborator, hold5.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
UserRole.SiteCollaborator, hold5.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
users.add(userAddHoldPermission);

STEP("Add content from the site to the hold using the bulk API.");
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
.startBulkProcess(holdBulkOperation, hold5.getId());
.startBulkProcess(holdBulkOperation, hold5.getId());

// Verify the status code
assertStatusCode(ACCEPTED);
assertEquals(NUMBER_OF_FILES, bulkOperationEntry.getTotalItems());

UserModel userWithoutAddToHoldCapability = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
UserRole
.SiteCollaborator,
hold5.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
UserRole.SiteCollaborator,
hold5.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
users.add(userWithoutAddToHoldCapability);

STEP("Cancel the bulk operation.");
getRestAPIFactory().getHoldsAPI(userWithoutAddToHoldCapability)
.cancelBulkOperation(hold5.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
.cancelBulkOperation(hold5.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());

STEP("Verify the response status code and the error message.");
assertStatusCode(FORBIDDEN);
@@ -572,7 +547,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
}

private void assertBulkProcessStatus(HoldBulkStatus holdBulkStatus, long expectedProcessedItems,
int expectedErrorsCount, String expectedErrorMessage, HoldBulkOperation holdBulkOperation)
int expectedErrorsCount, String expectedErrorMessage, HoldBulkOperation holdBulkOperation)
{
assertEquals("DONE", holdBulkStatus.getStatus());
assertEquals(expectedProcessedItems, holdBulkStatus.getTotalItems());
@@ -42,7 +42,7 @@ import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.rest.v0.RecordCategoriesAPI;
import org.alfresco.test.AlfrescoTest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;
@@ -144,3 +144,5 @@ public class CreateElectronicRecordsTests extends BaseRMRestTest {
}

}
@@ -234,3 +234,4 @@ public class MoveToRuleOnFoldersTest extends BaseRMRestTest{
assertStatusCode(OK);
}
}
@@ -44,7 +44,7 @@ import org.alfresco.rest.v0.service.DispositionScheduleService;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.model.RepoTestModel;
import org.alfresco.utility.model.UserModel;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.1.2.1</version>
<version>23.6.0.21</version>
</parent>

<modules>
@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.15
POSTGRES_TAG=16.6
SOLR6_TAG=2.0.13
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8
@@ -119,6 +119,11 @@ rm.patch.v35.holdNewChildAssocPatch.batchSize=1000
rm.haspermissionmap.read=Read
rm.haspermissionmap.write=WriteProperties,AddChildren,ReadContent

# Extended Permissions
# Enable matching the given username with the correct casing username when retrieving an IPR group.
# Only needs to be used if there are owners that don't have the username in the correct casing.
rm.extendedSecurity.enableUsernameNormalization=false

#
# Extended auto-version behaviour. If true and other auto-version properties are satisfied, then
# a document will be auto-versioned when its type is changed.
@@ -34,4 +34,7 @@
<!-- content cleanser -->
<bean id="contentCleanser.522022M" class="org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanser522022M"/>

<!-- content cleanser -->
<bean id="contentCleanser.SevenPass" class="org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanserSevenPass"/>

</beans>
@@ -15,6 +15,13 @@
<parameter property="end" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>

<parameterMap id="parameter_NodeIdsWhichReferenceContentUrl" type="map">
<parameter property="contentUrlShort" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="contentUrlCrc" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="localName" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="uri" jdbcType="VARCHAR" javaType="java.lang.String"/>
</parameterMap>

<resultMap id="result_NodeRefEntity" type="org.alfresco.module.org_alfresco_module_rm.query.NodeRefEntity">
<result property="row" column="row" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="protocol" column="protocol" jdbcType="VARCHAR" javaType="java.lang.String"/>
@@ -55,18 +62,21 @@
|
||||
|
||||
<!-- Get list of node ids which reference given content url -->
|
||||
<select id="select_NodeIdsWhichReferenceContentUrl"
|
||||
parameterType="ContentUrl"
|
||||
parameterMap="parameter_NodeIdsWhichReferenceContentUrl"
|
||||
resultMap="result_NodeIds">
|
||||
select
|
||||
p.node_id
|
||||
from
|
||||
alf_content_url cu
|
||||
LEFT OUTER JOIN alf_content_data cd ON (cd.content_url_id = cu.id)
|
||||
LEFT OUTER JOIN alf_node_properties p ON (p.long_value = cd.id)
|
||||
WHERE
|
||||
content_url_short = #{contentUrlShort} and
|
||||
content_url_crc = #{contentUrlCrc}
|
||||
|
||||
left outer join alf_content_data cd ON (cd.content_url_id = cu.id)
|
||||
left outer join alf_node_properties p ON (p.long_value = cd.id)
|
||||
left outer join alf_qname q ON (q.id = p.qname_id)
|
||||
left outer join alf_namespace n ON (n.id = q.ns_id)
|
||||
where
|
||||
cu.content_url_short = ? and
|
||||
cu.content_url_crc = ? and
|
||||
q.local_name = ? and
|
||||
n.uri = ?
|
||||
</select>
|
||||
|
||||
<select id="select_RecordFoldersWithSchedules"
|
||||
|
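The reworked select above now joins through alf_qname and alf_namespace and is driven by the parameter_NodeIdsWhichReferenceContentUrl map rather than a ContentUrl object. A caller-side sketch that mirrors the RecordsManagementQueryDAOImpl change later in this compare; the class name and the statement-id constant value are placeholders, since the real statement namespace is not shown here:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.mybatis.spring.SqlSessionTemplate;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity;

public class NodeIdsByContentUrlSketch
{
    // Must match the <select id="select_NodeIdsWhichReferenceContentUrl"> above (placeholder value).
    private static final String SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL = "select_NodeIdsWhichReferenceContentUrl";

    private SqlSessionTemplate template;

    public List<Long> findNodeIds(String contentUrl)
    {
        // ContentUrlEntity derives the short form and CRC that the query filters on
        ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
        contentUrlEntity.setContentUrl(contentUrl.toLowerCase());

        Map<String, Object> params = new HashMap<>(4);
        params.put("contentUrlShort", contentUrlEntity.getContentUrlShort());
        params.put("contentUrlCrc", contentUrlEntity.getContentUrlCrc());
        params.put("localName", ContentModel.PROP_CONTENT.getLocalName());
        params.put("uri", ContentModel.PROP_CONTENT.getNamespaceURI());

        return template.selectList(SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL, params);
    }
}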
@@ -611,6 +611,7 @@
<property name="authorityService" ref="authorityService"/>
<property name="permissionService" ref="permissionService"/>
<property name="transactionService" ref="transactionService"/>
<property name="enableUsernameNormalization" value="${rm.extendedSecurity.enableUsernameNormalization}" />
</bean>

<bean id="ExtendedSecurityService" class="org.springframework.aop.framework.ProxyFactoryBean">
@@ -35,3 +35,4 @@
}
</#escape>
</#macro>

@@ -219,3 +219,4 @@ function getRecordFolder(recordFolder, parentPath)

// Start webscript
main();
@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.1.2.1</version>
<version>23.6.0.21</version>
</parent>

<properties>
@@ -427,7 +427,7 @@
<configuration>
<images>
<image>
<name>postgres:16.6</name>
<name>postgres:15.4</name>
<run>
<ports>
<port>${postgresql.tests.port}:${postgresql.port}</port>
@@ -190,3 +190,4 @@ public final class NamePathDataExtractor extends AbstractDataExtractor
return Objects.hash(nodeService, filePlanService, ruleService);
}
}

@@ -120,3 +120,4 @@ public class RecordsManagementBootstrap extends AbstractLifecycleBean
// NOOP
}
}

@@ -43,3 +43,4 @@ public record BulkOperation(Query searchQuery, String operationType) implements
}
}
}

@@ -88,3 +88,4 @@ public class AssocPolicy extends AbstractBasePolicy
}

}
@@ -1,43 +1,51 @@
/*
* #%L
* Alfresco Remote API
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.model;

import static org.apache.commons.lang3.StringUtils.length;
package org.alfresco.module.org_alfresco_module_rm.content.cleanser;

import java.io.File;

/**
* An object representing user authorization key request body.
* DoD 5220-22M Seven Pass data cleansing implementation.
*
*/
public record AuthKey(String authorizationKey)
public class ContentCleanserSevenPass extends ContentCleanser522022M
{
/**
* @see org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanser#cleanse(java.io.File)
*/
@Override
public String toString()
public void cleanse(File file)
{
// for security reasons the key content should be never logged
return "AuthKey[" +
"authorizationKeyLength=" + length(authorizationKey) +
']';
super.cleanse(file);
overwrite(file, overwriteZeros);
overwrite(file, overwriteZeros);
overwrite(file, overwriteOnes);
overwrite(file, overwriteRandom);

}
}
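Together with the contentCleanser.SevenPass bean registered earlier in this compare, the class above layers four extra passes on top of ContentCleanser522022M, giving seven overwrites in total. A small usage sketch, assuming the caller resolves the bean straight from the Spring context (the wiring into the eager content store cleaner is not shown in this compare):

import java.io.File;

import org.springframework.context.ApplicationContext;

import org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanserSevenPass;

public class SevenPassCleanseSketch
{
    /** Looks up the cleanser bean registered above and wipes the given file in place. */
    public void wipe(ApplicationContext context, File file)
    {
        ContentCleanserSevenPass cleanser = (ContentCleanserSevenPass) context.getBean("contentCleanser.SevenPass");
        // throws ContentIOException when the file does not exist or is not writable,
        // as exercised by the ContentCleanserSevenPassUnitTest added in this compare
        cleanser.cleanse(file);
    }
}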
@@ -141,3 +141,5 @@ public class NotifyOfRecordsDueForReviewJobExecuter extends RecordsManagementJob
} // end of execute method

}

@@ -103,3 +103,4 @@ public class RMv23SavedSearchesPatch extends AbstractModulePatch
}
}
}

@@ -58,3 +58,4 @@ public class PropertyValuesOfChildrenQueryParams
}

}
@@ -27,7 +27,6 @@

package org.alfresco.module.org_alfresco_module_rm.query;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,6 +35,11 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mybatis.spring.SqlSessionTemplate;

import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.node.NodeDAO;
@@ -47,9 +51,6 @@ import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mybatis.spring.SqlSessionTemplate;

/**
* Records management query DAO implementation
@@ -89,7 +90,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
protected TenantService tenantService;

/**
* @param sqlSessionTemplate SQL session template
* @param sqlSessionTemplate
* SQL session template
*/
public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate)
{
@@ -97,7 +99,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
}

/**
* @param qnameDAO qname DAO
* @param qnameDAO
* qname DAO
*/
public final void setQnameDAO(QNameDAO qnameDAO)
{
@@ -173,8 +176,9 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
/**
* Get a set of node reference which reference the provided content URL
*
* @param String contentUrl content URL
* @return Set<NodeRef> set of nodes that reference the provided content URL
* @param String
* contentUrl content URL
* @return Set<NodeRef> set of nodes that reference the provided content URL
*/
@Override
public Set<NodeRef> getNodeRefsWhichReferenceContentUrl(String contentUrl)
@@ -188,13 +192,19 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl.toLowerCase());

Map<String, Object> params = new HashMap<>(4);
params.put("contentUrlShort", contentUrlEntity.getContentUrlShort());
params.put("contentUrlCrc", contentUrlEntity.getContentUrlCrc());
params.put("localName", ContentModel.PROP_CONTENT.getLocalName());
params.put("uri", ContentModel.PROP_CONTENT.getNamespaceURI());

if (logger.isDebugEnabled())
{
logger.debug("Executing query " + SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL);
}

// Get all the node ids which reference the given content url
List<Long> nodeIds = template.selectList(SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL, contentUrlEntity);
List<Long> nodeIds = template.selectList(SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL, params);

if (logger.isDebugEnabled())
{
@@ -224,7 +234,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
if (logger.isDebugEnabled())
{
logMessage.append(nodeRefToAdd)
.append(" (from version)");
.append(" (from version)");
}
}

@@ -232,7 +242,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
else
{
nodeRefToAdd = nodeDAO.getNodeIdStatus(nodeId)
.getNodeRef();
.getNodeRef();
if (logger.isDebugEnabled())
{
logMessage.append(nodeRefToAdd);
@@ -266,9 +276,9 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
{
Map<String, Object> params = new HashMap<>(2);
params.put("processed", qnameDAO.getQName(ASPECT_DISPOSITION_PROCESSED)
.getFirst());
.getFirst());
params.put("folderQnameId", qnameDAO.getQName(TYPE_RECORD_FOLDER)
.getFirst());
.getFirst());
params.put("start", start);
params.put("end", end);

@@ -280,7 +290,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
for (NodeRefEntity nodeRefEntity : entities)
{
results.add(
new NodeRef(nodeRefEntity.getProtocol(), nodeRefEntity.getIdentifier(), nodeRefEntity.getUuid()));
new NodeRef(nodeRefEntity.getProtocol(), nodeRefEntity.getIdentifier(), nodeRefEntity.getUuid()));
}

return results;
@@ -289,7 +299,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
/**
* @see org.alfresco.module.org_alfresco_module_rm.query.RecordsManagementQueryDAO#getPropertyStringValueEntity(String stringValue)
*/
public PropertyStringValueEntity getPropertyStringValueEntity(String stringValue){
public PropertyStringValueEntity getPropertyStringValueEntity(String stringValue)
{

PropertyStringValueEntity propertyStringValueEntity = new PropertyStringValueEntity();
propertyStringValueEntity.setValue(stringValue);
@@ -34,6 +34,12 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
@@ -42,7 +48,10 @@ import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.security.authority.RMAuthority;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -54,12 +63,6 @@ import org.alfresco.service.namespace.RegexQNamePattern;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.ParameterCheck;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
|
||||
/**
|
||||
* Extended security service implementation.
|
||||
@@ -68,9 +71,9 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
{
|
||||
/** ipr group names */
|
||||
static final String ROOT_IPR_GROUP = "INPLACE_RECORD_MANAGEMENT";
|
||||
@@ -95,8 +98,11 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/** transaction service */
|
||||
private TransactionService transactionService;
|
||||
|
||||
private boolean enableUsernameNormalization;
|
||||
|
||||
/**
|
||||
* @param filePlanService file plan service
|
||||
* @param filePlanService
|
||||
* file plan service
|
||||
*/
|
||||
public void setFilePlanService(FilePlanService filePlanService)
|
||||
{
|
||||
@@ -104,7 +110,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filePlanRoleService file plan role service
|
||||
* @param filePlanRoleService
|
||||
* file plan role service
|
||||
*/
|
||||
public void setFilePlanRoleService(FilePlanRoleService filePlanRoleService)
|
||||
{
|
||||
@@ -112,7 +119,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param authorityService authority service
|
||||
* @param authorityService
|
||||
* authority service
|
||||
*/
|
||||
public void setAuthorityService(AuthorityService authorityService)
|
||||
{
|
||||
@@ -120,7 +128,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param permissionService permission service
|
||||
* @param permissionService
|
||||
* permission service
|
||||
*/
|
||||
public void setPermissionService(PermissionService permissionService)
|
||||
{
|
||||
@@ -128,13 +137,23 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param transactionService transaction service
|
||||
* @param transactionService
|
||||
* transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param enableUsernameNormalization
|
||||
* enable username normalization to ensure correct casing
|
||||
*/
|
||||
public void setEnableUsernameNormalization(boolean enableUsernameNormalization)
|
||||
{
|
||||
this.enableUsernameNormalization = enableUsernameNormalization;
|
||||
}
|
||||
|
||||
/**
|
||||
* Application context refresh event handler
|
||||
*/
|
||||
@@ -142,19 +161,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent)
|
||||
{
|
||||
// run as System on bootstrap
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>()
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>() {
|
||||
public Object doWork()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
// if the root group doesn't exist then create it
|
||||
if (!authorityService.authorityExists(getRootIRPGroup()))
|
||||
{
|
||||
authorityService.createAuthority(AuthorityType.GROUP, ROOT_IPR_GROUP, ROOT_IPR_GROUP,
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -174,7 +191,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
return GROUP_PREFIX + ROOT_IPR_GROUP;
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService#hasExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override
|
||||
@@ -224,8 +241,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Helper to get authorities for a given group
|
||||
*
|
||||
* @param group group name
|
||||
* @return Set<String> immediate authorities
|
||||
* @param group
|
||||
* group name
|
||||
* @return Set<String> immediate authorities
|
||||
*/
|
||||
private Set<String> getAuthorities(String group)
|
||||
{
|
||||
@@ -284,8 +302,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Return null if none found.
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
*/
|
||||
private Pair<String, String> getIPRGroups(NodeRef nodeRef)
|
||||
{
|
||||
@@ -321,17 +340,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Given a set of readers and writers find or create the appropriate IPR groups.
|
||||
* <p>
|
||||
* The IPR groups are named with hashes of the authority lists in order to reduce
|
||||
* the set of groups that require exact match. A further index is used to handle
|
||||
* a situation where there is a hash clash, but a difference in the authority lists.
|
||||
* The IPR groups are named with hashes of the authority lists in order to reduce the set of groups that require exact match. A further index is used to handle a situation where there is a hash clash, but a difference in the authority lists.
|
||||
* <p>
|
||||
* When no match is found the groups are created. Once created
|
||||
* When no match is found the groups are created. Once created
|
||||
*
|
||||
* @param filePlan file plan
|
||||
* @param readers authorities with read
|
||||
* @param writers authorities with write
|
||||
* @return Pair<String, String> where first is the full name of the read group and
|
||||
* second is the full name of the write group
|
||||
* @param filePlan
|
||||
* file plan
|
||||
* @param readers
|
||||
* authorities with read
|
||||
* @param writers
|
||||
* authorities with write
|
||||
* @return Pair<String, String> where first is the full name of the read group and second is the full name of the write group
|
||||
*/
|
||||
private Pair<String, String> createOrFindIPRGroups(Set<String> readers, Set<String> writers)
|
||||
{
|
||||
@@ -343,20 +362,28 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Create or find an IPR group based on the provided prefix and authorities.
|
||||
*
|
||||
* @param groupPrefix group prefix
|
||||
* @param authorities authorities
|
||||
* @return String full group name
|
||||
* @param groupPrefix
|
||||
* group prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return String full group name
|
||||
*/
|
||||
private String createOrFindIPRGroup(String groupPrefix, Set<String> authorities)
|
||||
{
|
||||
String group = null;
|
||||
|
||||
// If enabled, the authorities are forced to match the correct casing of the usernames in case they were set
|
||||
// with the incorrect casing.
|
||||
// If not, it will just use the authorities as they are.
|
||||
// In normal circumstances, the authorities are in the correct casing, so this is disabled by default.
|
||||
Set<String> authoritySet = normalizeAuthorities(authorities);
|
||||
|
||||
// find group or determine what the next index is if no group exists or there is a clash
|
||||
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authorities);
|
||||
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authoritySet);
|
||||
|
||||
if (groupResult.getFirst() == null)
|
||||
{
|
||||
group = createIPRGroup(groupPrefix, authorities, groupResult.getSecond());
|
||||
group = createIPRGroup(groupPrefix, authoritySet, groupResult.getSecond());
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -369,13 +396,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Given a group name prefix and the authorities, finds the exact match existing group.
|
||||
* <p>
|
||||
* If the group does not exist then the group returned is null and the index shows the next available
|
||||
* group index for creation.
|
||||
* If the group does not exist then the group returned is null and the index shows the next available group index for creation.
|
||||
*
|
||||
* @param groupPrefix group name prefix
|
||||
* @param authorities authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second
|
||||
* if the next available create index
|
||||
* @param groupPrefix
|
||||
* group name prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second if the next available create index
|
||||
*/
|
||||
private Pair<String, Integer> findIPRGroup(String groupPrefix, Set<String> authorities)
|
||||
{
|
||||
@@ -391,12 +418,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
while (hasMoreItems == true)
|
||||
{
|
||||
// get matching authorities
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS*pageCount, MAX_ITEMS));
|
||||
PagingResults<String> results = authorityService.getAuthorities(
|
||||
AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS * pageCount, MAX_ITEMS));
|
||||
|
||||
// record the total count
|
||||
nextGroupIndex = nextGroupIndex + results.getPage().size();
|
||||
@@ -413,29 +441,88 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
|
||||
// determine if there are any more pages to inspect
|
||||
hasMoreItems = results.hasMoreItems();
|
||||
pageCount ++;
|
||||
pageCount++;
|
||||
}
|
||||
|
||||
return new Pair<>(iprGroup, nextGroupIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of authorities, normalizes the authority names to ensure correct casing.
|
||||
*
|
||||
* @param authNames
|
||||
* @return
|
||||
*/
|
||||
private Set<String> normalizeAuthorities(Set<String> authNames)
|
||||
{
|
||||
// If disabled or no authorities, return as is
|
||||
if (!enableUsernameNormalization || authNames == null || authNames.isEmpty())
|
||||
{
|
||||
return authNames;
|
||||
}
|
||||
|
||||
Set<String> normalizedAuthorities = new HashSet<>();
|
||||
for (String authorityName : authNames)
|
||||
{
|
||||
normalizedAuthorities.add(normalizeAuthorityName(authorityName));
|
||||
}
|
||||
return normalizedAuthorities;
|
||||
}
|
||||
|
||||
/**
|
||||
* Usernames are case insensitive but affect the IPR group matching when set with different casing. For a given authority of type user, this method normalizes the authority name. If group, it returns the name as-is.
|
||||
*
|
||||
* @param authorityName
|
||||
* the authority name to normalize
|
||||
* @return the normalized authority name
|
||||
*/
|
||||
private String normalizeAuthorityName(String authorityName)
|
||||
{
|
||||
if (authorityName == null || authorityName.startsWith(GROUP_PREFIX))
|
||||
{
|
||||
return authorityName;
|
||||
}
|
||||
|
||||
// For users, attempt to get the correct casing from the username property of the user node
|
||||
if (authorityService.authorityExists(authorityName))
|
||||
{
|
||||
try
|
||||
{
|
||||
NodeRef authorityNodeRef = authorityService.getAuthorityNodeRef(authorityName);
|
||||
if (authorityNodeRef != null)
|
||||
{
|
||||
String username = (String) nodeService.getProperty(authorityNodeRef, ContentModel.PROP_USERNAME);
|
||||
return username != null ? username : authorityName;
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// If anything goes wrong, fallback to the original name
|
||||
}
|
||||
}
|
||||
|
||||
return authorityName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether a group exactly matches a list of authorities.
|
||||
*
|
||||
* @param authorities list of authorities
|
||||
* @param group group
|
||||
* @param authorities
|
||||
* list of authorities
|
||||
* @param group
|
||||
* group
|
||||
* @return
|
||||
*/
|
||||
private boolean isIPRGroupTrueMatch(String group, Set<String> authorities)
|
||||
{
|
||||
//Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
// Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
Set<String> plainAuthorities = new HashSet<String>();
|
||||
if (authorities != null)
|
||||
{
|
||||
plainAuthorities.addAll(authorities);
|
||||
plainAuthorities.remove(PermissionService.ALL_AUTHORITIES);
|
||||
}
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
return contained.equals(plainAuthorities);
|
||||
}
|
||||
|
||||
@@ -444,15 +531,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param authorities authorities
|
||||
* @return String group prefix short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return String group prefix short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
/* package */ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -464,13 +553,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
/* package */ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
{
|
||||
return getIPRGroupShortName(prefix, authorities, Integer.toString(index));
|
||||
}
|
||||
@@ -480,17 +573,21 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Note this excludes the "GROUP_" prefix.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
private String getIPRGroupShortName(String prefix, Set<String> authorities, String index)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -498,8 +595,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Gets the hashcode value of a set of authorities.
|
||||
*
|
||||
* @param authorities set of authorities
|
||||
* @return int hash code
|
||||
* @param authorities
|
||||
* set of authorities
|
||||
* @return int hash code
|
||||
*/
|
||||
private int getAuthoritySetHashCode(Set<String> authorities)
|
||||
{
|
||||
@@ -514,10 +612,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Creates a new IPR group.
|
||||
*
|
||||
* @param groupNamePrefix group name prefix
|
||||
* @param children child authorities
|
||||
* @param index group index
|
||||
* @return String full name of created group
|
||||
* @param groupNamePrefix
|
||||
* group name prefix
|
||||
* @param children
|
||||
* child authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String full name of created group
|
||||
*/
|
||||
private String createIPRGroup(String groupNamePrefix, Set<String> children, int index)
|
||||
{
|
||||
@@ -547,10 +648,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
}
}
catch(DuplicateChildNodeNameException ex)
catch (DuplicateChildNodeNameException ex)
{
// the group was concurrently created
group = authorityService.getName(AuthorityType.GROUP, groupShortName);
// Rethrow as ConcurrencyFailureException so that is can be retried and linked to the group created by the concurrent transaction
throw new ConcurrencyFailureException("IPR group creation failed due to concurrent duplicate group name creation: " + groupShortName);
}

return group;
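Because a clash on the IPR group name is now rethrown as a ConcurrencyFailureException instead of silently reusing the concurrently created group, callers that may race are expected to run the operation inside a retrying transaction, as the new concurrency tests later in this compare do. A minimal sketch of that pattern (field wiring omitted, services injected as in the tests):

import java.util.Set;

import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;

public class ExtendedSecurityRetrySketch
{
    private RetryingTransactionHelper retryingTransactionHelper;
    private ExtendedSecurityService extendedSecurityService;

    /** Applies extended security inside a retrying transaction so a concurrent IPR group creation is retried. */
    public void setWithRetry(NodeRef nodeRef, Set<String> readers, Set<String> writers)
    {
        retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable
            {
                extendedSecurityService.set(nodeRef, readers, writers);
                return null;
            }
        }, false, true);
    }
}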
@@ -559,8 +660,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Assign IPR groups to a node reference with the correct permissions.
|
||||
*
|
||||
* @param iprGroups iprGroups, first read and second write
|
||||
* @param nodeRef node reference
|
||||
* @param iprGroups
|
||||
* iprGroups, first read and second write
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void assignIPRGroupsToNode(Pair<String, String> iprGroups, NodeRef nodeRef)
|
||||
{
|
||||
@@ -598,7 +701,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Clear the nodes IPR permissions
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void clearPermissions(NodeRef nodeRef, Pair<String, String> iprGroups)
|
||||
{
|
||||
@@ -610,7 +714,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedReaders(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
{
|
||||
return getReaders(nodeRef);
|
||||
}
|
||||
@@ -618,7 +724,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedWriters(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
{
|
||||
return getWriters(nodeRef);
|
||||
}
|
||||
@@ -626,7 +734,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -634,7 +744,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -642,7 +754,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -650,7 +764,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -658,7 +774,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String>writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -666,7 +784,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
|
@@ -71,3 +71,4 @@ public class CustomLocalDateDeserializer extends StdDeserializer<LocalDate>
return LOCAL_DATE_OPTIONAL_TIME_PARSER.parseLocalDate(str);
}
}
@@ -29,14 +29,23 @@ package org.alfresco.module.org_alfresco_module_rm.test.legacy.service;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.site.SiteModel;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.security.AccessPermission;
|
||||
import org.alfresco.service.cmr.security.AccessStatus;
|
||||
import org.alfresco.service.cmr.security.AuthorityType;
|
||||
import org.alfresco.service.cmr.site.SiteService;
|
||||
import org.alfresco.service.cmr.site.SiteVisibility;
|
||||
import org.alfresco.util.GUID;
|
||||
@@ -73,8 +82,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
private String createTestUser()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>() {
|
||||
public String run()
|
||||
{
|
||||
String userName = GUID.generate();
|
||||
@@ -90,8 +98,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String elephant = createTestUser();
|
||||
final String snake = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run()
|
||||
{
|
||||
assertFalse(extendedSecurityService.hasExtendedSecurity(filePlan));
|
||||
@@ -133,8 +140,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String monkey = createTestUser();
|
||||
final String elephant = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
|
||||
public Void run() throws Exception
|
||||
@@ -184,112 +190,337 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
public void testDifferentUsersDifferentPermissions()
|
||||
{
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT)
|
||||
.getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
}
|
||||
});
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, true);
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
verifyCreatedGroups(documents, false);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithoutRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name.
|
||||
// Since there is no retry, we expect to get a ConcurrencyFailureException
|
||||
Assert.assertThrows(ConcurrencyFailureException.class, () -> {
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, false);
|
||||
});
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
// Since there was a ConcurrencyFailureException some threads failed to set extended security so some
|
||||
// documents may not have IPR groups created.
|
||||
verifyCreatedGroups(documents, true);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
private Set<NodeRef> setupConcurrentTestCase(int concurrentThreads, Set<String> extendedReaders, Set<String> extendedWriters)
|
||||
{
|
||||
final String usera = createTestUser();
|
||||
final String userb = createTestUser();
|
||||
final String owner = createTestUser();
|
||||
|
||||
extendedReaders.add(usera);
|
||||
extendedReaders.add(userb);
|
||||
extendedWriters.add(usera);
|
||||
extendedWriters.add(userb);
|
||||
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
|
||||
// Create a site
|
||||
NodeRef documentLib = createSite(new HashSet<>(), new HashSet<>());
|
||||
|
||||
// Create records in the site document library
|
||||
return createRecords(concurrentThreads, documentLib, owner);
|
||||
}
|
||||
|
||||
private NodeRef createSite(Set<String> readers, Set<String> writers)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<NodeRef>() {
|
||||
@Override
|
||||
public NodeRef execute() throws Throwable
|
||||
{
|
||||
final String siteShortName = GUID.generate();
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
readers.forEach(reader -> siteService.setMembership(siteShortName, reader, SiteModel.SITE_CONSUMER));
|
||||
writers.forEach(writer -> siteService.setMembership(siteShortName, writer, SiteModel.SITE_COLLABORATOR));
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<NodeRef> createRecords(int numRecords, NodeRef parent, String owner)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Set<NodeRef>>() {
|
||||
@Override
|
||||
public Set<NodeRef> execute() throws Throwable
|
||||
{
|
||||
int createdRecords = 0;
|
||||
Set<NodeRef> documents = new HashSet<>();
|
||||
while (createdRecords < numRecords)
|
||||
{
|
||||
final NodeRef doc = fileFolderService.create(parent, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
ownableService.setOwner(doc, owner);
|
||||
recordService.createRecord(filePlan, doc, rmFolder, true);
|
||||
recordService.file(doc);
|
||||
recordService.complete(doc);
|
||||
documents.add(doc);
|
||||
createdRecords++;
|
||||
}
|
||||
return documents;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers, boolean useRetry)
|
||||
{
|
||||
if (!useRetry)
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return;
|
||||
}
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
extendedSecurityService.set(doc, readers, writers);
|
||||
}
|
||||
|
||||
private void fireParallelExecutionOfSetExtendedSecurity(Set<NodeRef> documents, Set<String> extendedReaders, Set<String> extendedWriters, boolean useRetry)
|
||||
{
|
||||
CompletableFuture<?>[] futures = documents.stream()
|
||||
.map(doc -> CompletableFuture.runAsync(() -> setExtendedSecurity(doc, extendedReaders, extendedWriters, useRetry)))
|
||||
.toArray(CompletableFuture[]::new);
|
||||
|
||||
try
|
||||
{
|
||||
CompletableFuture.allOf(futures).join();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Throwable cause = e.getCause();
|
||||
if (cause instanceof ConcurrencyFailureException)
|
||||
{
|
||||
throw (ConcurrencyFailureException) cause;
|
||||
}
|
||||
throw new RuntimeException("Error during parallel execution", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void verifyCreatedGroups(Set<NodeRef> documents, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Set<String> expectedAuthorities = null;
|
||||
Set<Set<String>> errors = new HashSet<>();
|
||||
for (NodeRef doc : documents)
|
||||
{
|
||||
Set<AccessPermission> permissions = permissionService.getAllSetPermissions(doc);
|
||||
Set<String> authorities = getDocumentAuthorities(permissions);
|
||||
Set<String> authoritiesById = getAuthorityIds(authorities);
|
||||
|
||||
verifyIPRGroups(authorities, onlyDuplicatesValidation);
|
||||
|
||||
if (onlyDuplicatesValidation)
|
||||
{
|
||||
// Some documents may not have IPR groups created if there was a ConcurrencyFailureException
|
||||
continue;
|
||||
}
|
||||
|
||||
// All documents should have the same exact set of groups assigned
|
||||
if (expectedAuthorities == null)
|
||||
{
|
||||
expectedAuthorities = authoritiesById;
|
||||
}
|
||||
|
||||
if (!expectedAuthorities.equals(authoritiesById))
|
||||
{
|
||||
errors.add(authoritiesById);
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue("Unexpected authorities linked to document", errors.isEmpty());
|
||||
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<String> getDocumentAuthorities(Set<AccessPermission> permissions)
|
||||
{
|
||||
Set<String> authorities = new HashSet<>();
|
||||
|
||||
for (AccessPermission accessPermission : permissions)
|
||||
{
|
||||
String authority = accessPermission.getAuthority();
|
||||
String authName = authorityService.getName(AuthorityType.GROUP, authority);
|
||||
authorities.add(authName);
|
||||
|
||||
}
|
||||
return authorities;
|
||||
}
|
||||
|
||||
private Set<String> getAuthorityIds(Set<String> authorities)
|
||||
{
|
||||
Set<String> authorityIds = new HashSet<>();
|
||||
for (String authority : authorities)
|
||||
{
|
||||
String authId = authorityService.getAuthorityNodeRef(authority) != null
|
||||
? authorityService.getAuthorityNodeRef(authority).getId()
|
||||
: null;
|
||||
authorityIds.add(authId);
|
||||
}
|
||||
return authorityIds;
|
||||
}
|
||||
|
||||
private void verifyIPRGroups(Set<String> authorities, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
boolean hasGroupIPR = false;
|
||||
|
||||
for (String authorityName : authorities)
|
||||
{
|
||||
String shortName = authorityService.getShortName(authorityName);
|
||||
|
||||
if (authorityName.startsWith("GROUP_IPR"))
|
||||
{
|
||||
hasGroupIPR = true;
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP, null, shortName, false,
|
||||
false, new PagingRequest(0, 10));
|
||||
|
||||
assertEquals("No duplicated IPR group expected", 1, results.getPage().size());
|
||||
}
|
||||
}
|
||||
|
||||
if (!onlyDuplicatesValidation)
|
||||
{
|
||||
assertTrue("No IPR Groups created", hasGroupIPR);
|
||||
}
|
||||
}
|
||||
}
|
@@ -926,3 +926,4 @@ public class RMCaveatConfigScriptTest extends BaseRMWebScriptTestCase
}
}
}
@@ -105,3 +105,5 @@ public class RMConstraintScriptTest extends BaseRMWebScriptTestCase
}

}
|
||||
|
||||
|
||||
|
@@ -3,9 +3,9 @@
|
||||
#
|
||||
|
||||
# Version label
version.major=25
version.minor=1
version.revision=1
version.major=23
version.minor=6
version.revision=0
version.label=

# Edition label
|
@@ -0,0 +1,100 @@
/*
 * #%L
 * Alfresco Records Management Module
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 * -
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * -
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 * -
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.module.org_alfresco_module_rm.content.cleanser;

import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.File;

import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;

import org.alfresco.module.org_alfresco_module_rm.test.util.BaseUnitTest;
import org.alfresco.service.cmr.repository.ContentIOException;

/**
 * Eager content store cleaner unit test.
 */
public class ContentCleanserSevenPassUnitTest extends BaseUnitTest
{
    @InjectMocks
    @Spy
    private ContentCleanserSevenPass contentCleanserSevenPass = new ContentCleanserSevenPass()
    {
        /** dummy implementations */
        @Override
        protected void overwrite(File file, OverwriteOperation overwriteOperation)
        {
            // Intentionally left empty
        }
    };

    @Mock
    private File mockedFile;

    /**
     * Given that a file exists When I cleanse it Then the content is overwritten
     */
    @Test
    public void cleanseFile()
    {
        when(mockedFile.exists()).thenReturn(true);
        when(mockedFile.canWrite()).thenReturn(true);
        contentCleanserSevenPass.cleanse(mockedFile);
        verify(contentCleanserSevenPass, times(2)).overwrite(mockedFile, contentCleanserSevenPass.overwriteOnes);
        verify(contentCleanserSevenPass, times(3)).overwrite(mockedFile, contentCleanserSevenPass.overwriteZeros);
        verify(contentCleanserSevenPass, times(2)).overwrite(mockedFile, contentCleanserSevenPass.overwriteRandom);
    }

    /**
     * Given that the file does not exist When I cleanse it Then an exception is thrown
     */
    @Test(expected = ContentIOException.class)
    public void fileDoesNotExist()
    {
        when(mockedFile.exists()).thenReturn(false);
        when(mockedFile.canWrite()).thenReturn(true);
        contentCleanserSevenPass.cleanse(mockedFile);
    }

    /**
     * Given that I can not write to the file When I cleanse it Then an exception is thrown
     */
    @Test(expected = ContentIOException.class)
    public void cantWriteToFile()
    {
        when(mockedFile.exists()).thenReturn(true);
        when(mockedFile.canWrite()).thenReturn(false);
        contentCleanserSevenPass.cleanse(mockedFile);
    }
}
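The unit test above only pins down the behaviour, not the implementation. The following is a minimal illustrative sketch, not the production ContentCleanserSevenPass code: the OverwriteOperation hook and the overwriteOnes/overwriteZeros/overwriteRandom operations are taken from the test, while the exact pass ordering and the lambda-based operations are assumptions; only the pass counts (2x ones, 3x zeros, 2x random) and the precondition checks are grounded in the assertions shown.

package org.alfresco.module.org_alfresco_module_rm.content.cleanser;

import java.io.File;

import org.alfresco.service.cmr.repository.ContentIOException;

/** Illustrative sketch of a seven-pass cleanse that would satisfy ContentCleanserSevenPassUnitTest. */
public class SevenPassCleanseSketch
{
    /** Hypothetical stand-in for the module's OverwriteOperation type. */
    interface OverwriteOperation { void apply(File file); }

    final OverwriteOperation overwriteOnes = file -> { /* overwrite content with 0xFF bytes */ };
    final OverwriteOperation overwriteZeros = file -> { /* overwrite content with 0x00 bytes */ };
    final OverwriteOperation overwriteRandom = file -> { /* overwrite content with random bytes */ };

    public void cleanse(File file)
    {
        // The test expects ContentIOException when the file is missing or read-only
        if (!file.exists() || !file.canWrite())
        {
            throw new ContentIOException("Unable to cleanse file " + file);
        }
        // Seven passes in total; the order is assumed, the counts match times(..) in the test:
        // ones twice, zeros three times, random twice.
        overwriteOnes.apply(file);
        overwriteZeros.apply(file);
        overwriteRandom.apply(file);
        overwriteZeros.apply(file);
        overwriteOnes.apply(file);
        overwriteRandom.apply(file);
        overwriteZeros.apply(file);
    }
}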
@@ -96,3 +96,5 @@ public class RMv32HoldReportUpdatePatchUnitTest
        verify(mockedContentWriter, times(1)).putContent((InputStream) any());
    }
}
@@ -52,6 +52,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -67,6 +68,7 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthorityService;
@@ -523,6 +525,104 @@ public class ExtendedSecurityServiceImplUnitTest

    }

    /**
     * Given a node with no previous IPR groups assigned
     * And having pre-existing IPR groups matching the ones we need
     * When I add some read and write authorities but with a different casing
     * Then the existing IPR groups are used
     */
    @SuppressWarnings("unchecked")
    @Test public void addExtendedSecurityWithMixedCasingUsernames()
    {
        // Have the usernames in the node as the correct usernames but with incorrect casing
        String user1 = "UseR";
        String user2 = "UseR_w";

        // Incorrect IPR Group names
        Set<String> diffCasingReaders = Stream.of(user1, GROUP).collect(Collectors.toSet());
        Set<String> diffCasingWriters = Stream.of(user2, GROUP_W).collect(Collectors.toSet());
        String wrongReadGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(READER_GROUP_PREFIX, diffCasingReaders);
        String wrongWriteGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(WRITER_GROUP_PREFIX, diffCasingWriters);
        String wrongReadGroup = wrongReadGroupPrefix + "0";
        String wrongWriteGroup = wrongWriteGroupPrefix + "0";

        // Correct Group names
        String correctReadGroup = readGroupPrefix + "0";
        String correctWriteGroup = writeGroupPrefix + "0";

        // If queried for the correct groups, return the results
        PagingResults<String> mockedCorrectReadPResults = mock(PagingResults.class);
        PagingResults<String> mockedCorrectWritePResults = mock(PagingResults.class);
        when(mockedCorrectReadPResults.getPage())
                .thenReturn(Stream.of(GROUP_PREFIX + correctReadGroup).collect(Collectors.toList()));
        when(mockedAuthorityService.getAuthorities(
                eq(AuthorityType.GROUP),
                eq(RMAuthority.ZONE_APP_RM),
                eq(readGroupPrefix),
                eq(false),
                eq(false),
                any(PagingRequest.class)))
                        .thenReturn(mockedCorrectReadPResults);

        when(mockedCorrectWritePResults.getPage())
                .thenReturn(Stream.of(GROUP_PREFIX + correctWriteGroup).collect(Collectors.toList()));
        when(mockedAuthorityService.getAuthorities(
                eq(AuthorityType.GROUP),
                eq(RMAuthority.ZONE_APP_RM),
                eq(writeGroupPrefix),
                eq(false),
                eq(false),
                any(PagingRequest.class)))
                        .thenReturn(mockedCorrectWritePResults);

        // Don't return results for the incorrect groups (lenient as these may not be called with normalization enabled)
        PagingResults<String> mockedWrongReadPResults = mock(PagingResults.class);
        PagingResults<String> mockedWrongWritePResults = mock(PagingResults.class);
        lenient().when(mockedWrongReadPResults.getPage())
                .thenReturn(Collections.emptyList());
        lenient().when(mockedAuthorityService.getAuthorities(
                eq(AuthorityType.GROUP),
                eq(RMAuthority.ZONE_APP_RM),
                eq(wrongReadGroupPrefix),
                eq(false),
                eq(false),
                any(PagingRequest.class)))
                        .thenReturn(mockedWrongReadPResults);

        lenient().when(mockedWrongWritePResults.getPage())
                .thenReturn(Collections.emptyList());
        lenient().when(mockedAuthorityService.getAuthorities(
                eq(AuthorityType.GROUP),
                eq(RMAuthority.ZONE_APP_RM),
                eq(wrongWriteGroupPrefix),
                eq(false),
                eq(false),
                any(PagingRequest.class)))
                        .thenReturn(mockedWrongWritePResults);

        // The users do exist, despite being in a different casing and are able to be retrieved
        NodeRef noderefUser1 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER);
        when(mockedAuthorityService.authorityExists(user1)).thenReturn(true);
        when(mockedAuthorityService.getAuthorityNodeRef(user1)).thenReturn(noderefUser1);
        when(mockedNodeService.getProperty(noderefUser1, ContentModel.PROP_USERNAME)).thenReturn(USER);

        NodeRef noderefUser2 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER_W);
        when(mockedAuthorityService.authorityExists(user2)).thenReturn(true);
        when(mockedAuthorityService.getAuthorityNodeRef(user2)).thenReturn(noderefUser2);
        when(mockedNodeService.getProperty(noderefUser2, ContentModel.PROP_USERNAME)).thenReturn(USER_W);

        // Set the extended security service to normalize usernames
        extendedSecurityService.setEnableUsernameNormalization(true);
        extendedSecurityService.set(nodeRef, diffCasingReaders, diffCasingWriters);

        // Verify that the incorrect read group is not created
        verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongReadGroup, wrongReadGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));

        // Verify that the incorrect write group is not created
        verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongWriteGroup, wrongWriteGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
    }

    /**
     * Given a node with no previous IPR groups assigned
     * And existing IPR groups matches existing has, but not exact match
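The mixed-casing test above hinges on the username-normalization switch: when enabled, the service should resolve the supplied authority to its stored canonical casing before deriving IPR group names. A minimal sketch of that lookup, assuming a hypothetical helper (not the service's actual code); it uses only the AuthorityService and NodeService calls that the test itself mocks.

package org.alfresco.module.org_alfresco_module_rm.security;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.AuthorityService;

/** Hypothetical helper illustrating username normalization; the real ExtendedSecurityServiceImpl may differ. */
public class UsernameNormalizationSketch
{
    private AuthorityService authorityService;
    private NodeService nodeService;

    /** Returns the canonical casing of a user authority, falling back to the supplied value when it cannot be resolved. */
    public String normalizeUsername(String authority)
    {
        if (authority == null || !authorityService.authorityExists(authority))
        {
            return authority;
        }
        NodeRef authorityNodeRef = authorityService.getAuthorityNodeRef(authority);
        if (authorityNodeRef == null)
        {
            return authority;
        }
        // cm:userName holds the username as originally created, which is what IPR group names are derived from
        String storedUserName = (String) nodeService.getProperty(authorityNodeRef, ContentModel.PROP_USERNAME);
        return storedUserName != null ? storedUserName : authority;
    }
}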
@@ -47,3 +47,4 @@ public class RMYamlUnitTest extends BaseYamlUnitTest
        validateYamlFiles(getYamlFilesList(RM_COMMUNITY_YAML_FILES_PATH));
    }
}

@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-governance-services-community-repo-parent</artifactId>
    <version>25.1.2.1</version>
    <version>23.6.0.21</version>
  </parent>

  <build>
@@ -3315,7 +3315,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
aspectNames:
|
||||
type: array
|
||||
@@ -3346,7 +3346,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3396,7 +3396,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3429,7 +3429,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3484,7 +3484,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3549,7 +3549,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
properties:
|
||||
type: object
|
||||
@@ -3578,7 +3578,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3637,7 +3637,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3702,7 +3702,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
properties:
|
||||
type: object
|
||||
@@ -3729,7 +3729,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3779,7 +3779,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3881,7 +3881,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -3967,7 +3967,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4027,7 +4027,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4076,7 +4076,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4141,7 +4141,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
properties:
|
||||
type: object
|
||||
@@ -4166,7 +4166,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4223,7 +4223,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4830,7 +4830,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
@@ -4852,7 +4852,7 @@ definitions:
|
||||
type: string
|
||||
pattern: "^(?!(.*[\\\"\\*\\\\\\>\\<\\?\\/\\:\\|]+.*)|(.*[\\.]?.*[\\.]+$)|(.*[ ]+$))"
|
||||
description: |
|
||||
The name must not contain spaces or the following special characters: * " `<` `>` \ / ? : and |.
|
||||
The name must not contain spaces or the following special characters: * " < > \ / ? : and |.
|
||||
The character . must not be used at the end of the name.
|
||||
nodeType:
|
||||
type: string
|
||||
|
@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>25.1.2.1</version>
    <version>23.6.0.21</version>
  </parent>

  <modules>

@@ -8,7 +8,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-amps</artifactId>
    <version>25.1.2.1</version>
    <version>23.6.0.21</version>
  </parent>

  <properties>
@@ -51,8 +51,8 @@
      </exclusions>
    </dependency>
    <dependency>
      <groupId>commons-lang</groupId>
      <artifactId>commons-lang</artifactId>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <scope>provided</scope>
    </dependency>

@@ -30,7 +30,7 @@ import java.util.regex.Pattern;

import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.json.simple.JSONObject;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
@@ -92,7 +92,7 @@ public class WikiPageGet extends AbstractWikiWebScript
      {
         links.add(link);
         // build the list of available pages
         WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml(link));
         WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml4(link));
         if (wikiPage != null)
         {
            pageTitles.add(wikiPage.getTitle());
@@ -91,6 +91,15 @@ function doclist_getAllNodes(parsedArgs, filterParams, query, totalItemCount)
   };
}

function sanitizeJunkFavouriteKeys(favourites){
   for (var key in favourites) {
      if (!key || key.trim() === "") {
         delete favourites[key];
      }
   }
   return favourites;
}

/**
 * Main entry point: Create collection of documents and folders in the given space
 *
@@ -124,6 +133,28 @@ function doclist_main()
   if (logger.isLoggingEnabled())
      logger.log("doclist.lib.js - NodeRef: " + parsedArgs.nodeRef + " Query: " + query);

   favourites = sanitizeJunkFavouriteKeys(favourites);

   if(Object.keys(favourites).length === 0 && query === null)
   {
      return {
         luceneQuery: "",
         paging: {
            totalRecords: 0,
            startIndex: 0
         },
         container: parsedArgs.rootNode,
         parent: null,
         onlineEditing: utils.moduleInstalled("org.alfresco.module.vti"),
         itemCount: {
            folders: 0,
            documents: 0
         },
         items: [],
         customJSON: slingshotDocLib.getJSON()
      };
   }

   var totalItemCount = filterParams.limitResults ? parseInt(filterParams.limitResults, 10) : -1;
   // For all sites documentLibrary query we pull in all available results and post filter
   if (totalItemCount === 0) totalItemCount = -1;

@@ -182,11 +182,14 @@ var Filters =
         case "favourites":
            for (var favourite in favourites)
            {
               if (filterQuery)
               if (favourite && favourite.trim() !== "")
               {
                  filterQuery += " OR ";
                  if (filterQuery)
                  {
                     filterQuery += " OR ";
                  }
                  filterQuery += "ID:\"" + favourite + "\"";
               }
               filterQuery += "ID:\"" + favourite + "\"";
            }

            if (filterQuery.length !== 0)
@@ -201,7 +204,13 @@ var Filters =
            else
            {
               // empty favourites query
               filterQuery = "+ID:\"\"";
               logger.warn("No favourites found for user: " + person.properties.userName);
               return {
                  query: null,
                  limitResults: 0,
                  sort: [],
                  language: "lucene"
               };
            }

            filterParams.query = filterQuery;
@@ -23,6 +23,10 @@ function runAction(p_params)
   if (p_params.destNode.hasAspect("cm:lockable") && !p_params.destNode.hasAspect("trx:transferred"))
   {
      p_params.destNode.unlock();
      if(p_params.destNode.hasAspect("gd2:editingInGoogle"))
      {
         p_params.destNode.removeAspect("gd2:editingInGoogle");
      }
   }

   var resultId = originalDoc.name,

@@ -80,6 +80,11 @@ function runAction(p_params)
      {
         result.fileExist = true;
      }
      if (error.indexOf("FolderExistsException") != -1)
      {
         result.fileExist = true;
         result.type = "folder";
      }
   }

   results.push(result);
@@ -45,7 +45,7 @@ import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.alfresco.service.cmr.wiki.WikiService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyMap;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;
@@ -996,7 +996,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
            String link = m.group(1);
            link += "?title=<script>alert('xss');</script>";
            WikiPageInfo wikiPage2 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, link);
            WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml(link));
            WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml4(link));
            assertEquals(wikiPage2, wikiPage1);
         }
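The two hunks above track the commons-lang to commons-lang3 migration seen earlier in this diff: StringEscapeUtils.unescapeHtml from commons-lang 2.x becomes unescapeHtml4 in commons-lang3. A small illustrative snippet of the equivalent call, assuming commons-lang3 is on the classpath; the sample strings are made up for the example.

import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapeHtml4Example
{
    public static void main(String[] args)
    {
        // commons-lang 2.x:  org.apache.commons.lang.StringEscapeUtils.unescapeHtml(encoded)
        // commons-lang3:     StringEscapeUtils.unescapeHtml4(encoded)  (handles HTML 4.0 entities)
        String encoded = "Fish &amp; Chips &lt;wiki&gt;";
        String decoded = StringEscapeUtils.unescapeHtml4(encoded);
        System.out.println(decoded); // Fish & Chips <wiki>
    }
}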
@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>25.1.2.1</version>
    <version>23.6.0.21</version>
  </parent>

  <dependencies>
@@ -145,12 +145,6 @@
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
    </dependency>
    <dependency>
      <groupId>org.awaitility</groupId>
      <artifactId>awaitility</artifactId>
      <version>${dependency.awaitility.version}</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>

@@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005-2025 Alfresco Software Limited.
 * Copyright (C) 2005-2024 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *

@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * Copyright (C) 2005 - 2024 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2005-2025 Alfresco Software Limited.
|
||||
* Copyright (C) 2005-2014 Alfresco Software Limited.
|
||||
*
|
||||
* This file is part of Alfresco
|
||||
*
|
||||
@@ -18,9 +18,6 @@
|
||||
*/
|
||||
package org.alfresco.util;
|
||||
|
||||
import static org.awaitility.Awaitility.await;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
@@ -29,10 +26,11 @@ import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
/**
|
||||
* Tests for our instance of {@link java.util.concurrent.ThreadPoolExecutor}
|
||||
*
|
||||
@@ -41,8 +39,7 @@ import org.apache.commons.logging.LogFactory;
|
||||
public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
{
|
||||
|
||||
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(1);
|
||||
private static final Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
|
||||
private static Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
|
||||
private static final int DEFAULT_KEEP_ALIVE_TIME = 90;
|
||||
|
||||
@Override
|
||||
@@ -51,9 +48,9 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
SleepUntilAllWake.reset();
|
||||
}
|
||||
|
||||
public void testUpToCore()
|
||||
public void testUpToCore() throws Exception
|
||||
{
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
|
||||
|
||||
assertEquals(0, exec.getPoolSize());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
@@ -66,13 +63,13 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
assertEquals(5, exec.getPoolSize());
|
||||
|
||||
SleepUntilAllWake.wakeAll();
|
||||
waitForPoolSizeEquals(exec, 5);
|
||||
Thread.sleep(100);
|
||||
assertEquals(5, exec.getPoolSize());
|
||||
}
|
||||
|
||||
public void testPastCoreButNotHugeQueue()
|
||||
public void testPastCoreButNotHugeQueue() throws Exception
|
||||
{
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
|
||||
|
||||
assertEquals(0, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
@@ -99,13 +96,13 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
assertEquals(7, exec.getQueue().size());
|
||||
|
||||
SleepUntilAllWake.wakeAll();
|
||||
waitForPoolSizeEquals(exec, 5);
|
||||
Thread.sleep(100);
|
||||
assertEquals(5, exec.getPoolSize());
|
||||
}
|
||||
|
||||
public void testToExpandQueue() throws Exception
|
||||
{
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(2, 4, 5);
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
|
||||
|
||||
assertEquals(0, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
@@ -122,13 +119,13 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
|
||||
// Next should add one
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
waitForPoolSizeEquals(exec, 3); // Let the new thread spin up
|
||||
Thread.sleep(20); // Let the new thread spin up
|
||||
assertEquals(3, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
// And again
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
waitForPoolSizeEquals(exec, 4); // Let the new thread spin up
|
||||
Thread.sleep(20); // Let the new thread spin up
|
||||
assertEquals(4, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
@@ -142,10 +139,139 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
SleepUntilAllWake.wakeAll();
|
||||
Thread.sleep(100);
|
||||
|
||||
// All threads still running, as 5 second timeout
|
||||
// All threads still running, as 1 second timeout
|
||||
assertEquals(4, exec.getPoolSize());
|
||||
}
|
||||
|
||||
public void offTestToExpandThenContract() throws Exception
|
||||
{
|
||||
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
|
||||
exec.setKeepAliveTime(30, TimeUnit.MILLISECONDS);
|
||||
|
||||
assertEquals(0, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
assertEquals(2, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
assertEquals(2, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
// Next should add one
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
Thread.sleep(20); // Let the new thread spin up
|
||||
assertEquals(3, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
// And again
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
Thread.sleep(20); // Let the new thread spin up
|
||||
assertEquals(4, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
// But no more will be added, as we're at max
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
assertEquals(4, exec.getPoolSize());
|
||||
assertEquals(6, exec.getQueue().size());
|
||||
|
||||
SleepUntilAllWake.wakeAll();
|
||||
Thread.sleep(100);
|
||||
|
||||
// Wait longer than the timeout without any work, which should
|
||||
// let all the extra threads go away
|
||||
// (Depending on how closely your JVM follows the specification,
|
||||
// we may fall back to the core size which is correct, or we
|
||||
// may go to zero which is wrong, but hey, it's the JVM...)
|
||||
logger.debug("Core pool size is " + exec.getCorePoolSize());
|
||||
logger.debug("Current pool size is " + exec.getPoolSize());
|
||||
logger.debug("Queue size is " + exec.getQueue().size());
|
||||
assertTrue(
|
||||
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
|
||||
exec.getPoolSize() >= 0
|
||||
);
|
||||
assertTrue(
|
||||
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
|
||||
exec.getPoolSize() <= 2
|
||||
);
|
||||
|
||||
SleepUntilAllWake.reset();
|
||||
|
||||
// Add 2 new jobs, will stay/ go to at 2 threads
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
|
||||
// Let the idle threads grab them, then check
|
||||
Thread.sleep(20);
|
||||
assertEquals(2, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
|
||||
// 3 more, still at 2 threads
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
assertEquals(2, exec.getPoolSize());
|
||||
assertEquals(3, exec.getQueue().size());
|
||||
|
||||
// And again wait for it all
|
||||
SleepUntilAllWake.wakeAll();
|
||||
Thread.sleep(100);
|
||||
assertEquals(2, exec.getPoolSize());
|
||||
|
||||
|
||||
// Now decrease the overall pool size
|
||||
// Will rise and fall to there now
|
||||
exec.setCorePoolSize(1);
|
||||
|
||||
// Run a quick job, to ensure that the
|
||||
// "can I kill one yet" logic is applied
|
||||
SleepUntilAllWake.reset();
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
SleepUntilAllWake.wakeAll();
|
||||
|
||||
Thread.sleep(100);
|
||||
assertEquals(1, exec.getPoolSize());
|
||||
assertEquals(0, exec.getQueue().size());
|
||||
|
||||
SleepUntilAllWake.reset();
|
||||
|
||||
|
||||
// Push enough on to go up to 4 active threads
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
exec.execute(new SleepUntilAllWake());
|
||||
|
||||
Thread.sleep(20); // Let the new threads spin up
|
||||
assertEquals(4, exec.getPoolSize());
|
||||
assertEquals(6, exec.getQueue().size());
|
||||
|
||||
// Wait for them all to finish, should drop back to 1 now
|
||||
// (Or zero, if your JVM can't read the specification...)
|
||||
SleepUntilAllWake.wakeAll();
|
||||
Thread.sleep(100);
|
||||
assertTrue(
|
||||
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
|
||||
exec.getPoolSize() >= 0
|
||||
);
|
||||
assertTrue(
|
||||
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
|
||||
exec.getPoolSize() <= 1
|
||||
);
|
||||
}
|
||||
|
||||
private DynamicallySizedThreadPoolExecutor createInstance(int corePoolSize, int maximumPoolSize, int keepAliveTime)
|
||||
{
|
||||
// We need a thread factory
|
||||
@@ -165,11 +291,6 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
new ThreadPoolExecutor.CallerRunsPolicy());
|
||||
}
|
||||
|
||||
private void waitForPoolSizeEquals(DynamicallySizedThreadPoolExecutor exec, int expectedSize)
|
||||
{
|
||||
await().atMost(MAX_WAIT_TIMEOUT).until(() -> exec.getPoolSize() == expectedSize);
|
||||
}
|
||||
|
||||
public static class SleepUntilAllWake implements Runnable
|
||||
{
|
||||
private static ConcurrentMap<String, Thread> sleeping = new ConcurrentHashMap<String, Thread>();
|
||||
@@ -178,18 +299,17 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
@Override
|
||||
public void run()
|
||||
{
|
||||
if (allAwake)
|
||||
return;
|
||||
if(allAwake) return;
|
||||
|
||||
// Track us, and wait for the bang
|
||||
logger.debug("Adding thread: " + Thread.currentThread().getName());
|
||||
sleeping.put(Thread.currentThread().getName(), Thread.currentThread());
|
||||
try
|
||||
{
|
||||
Thread.sleep(30 * 1000);
|
||||
Thread.sleep(30*1000);
|
||||
System.err.println("Warning - Thread finished sleeping without wake!");
|
||||
}
|
||||
catch (InterruptedException e)
|
||||
catch(InterruptedException e)
|
||||
{
|
||||
logger.debug("Interrupted thread: " + Thread.currentThread().getName());
|
||||
}
|
||||
@@ -198,13 +318,12 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||
public static void wakeAll()
|
||||
{
|
||||
allAwake = true;
|
||||
for (Entry<String, Thread> t : sleeping.entrySet())
|
||||
for(Entry<String, Thread> t : sleeping.entrySet())
|
||||
{
|
||||
logger.debug("Interrupting thread: " + t.getKey());
|
||||
t.getValue().interrupt();
|
||||
}
|
||||
}
|
||||
|
||||
public static void reset()
|
||||
{
|
||||
logger.debug("Resetting.");
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2005-2025 Alfresco Software Limited.
|
||||
* Copyright (C) 2005-2023 Alfresco Software Limited.
|
||||
*
|
||||
* This file is part of Alfresco
|
||||
*
|
||||
@@ -20,11 +20,13 @@ package org.alfresco.util.transaction;
|
||||
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Objects;
|
||||
|
||||
import jakarta.transaction.RollbackException;
|
||||
import jakarta.transaction.Status;
|
||||
import jakarta.transaction.UserTransaction;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.springframework.transaction.CannotCreateTransactionException;
|
||||
import org.springframework.transaction.NoTransactionException;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
@@ -33,8 +35,9 @@ import org.springframework.transaction.support.AbstractPlatformTransactionManage
|
||||
import org.springframework.transaction.support.DefaultTransactionStatus;
|
||||
|
||||
/**
|
||||
* @author Derek Hulley
|
||||
* @see org.alfresco.util.transaction.SpringAwareUserTransaction
|
||||
*
|
||||
* @author Derek Hulley
|
||||
*/
|
||||
public class SpringAwareUserTransactionTest extends TestCase
|
||||
{
|
||||
@@ -242,6 +245,58 @@ public class SpringAwareUserTransactionTest extends TestCase
|
||||
checkNoStatusOnThread();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test for leaked transactions (no guarantee it will succeed due to reliance
|
||||
* on garbage collector), so disabled by default.
|
||||
*
|
||||
* Also, if it succeeds, transaction call stack tracing will be enabled
|
||||
* potentially hitting the performance of all subsequent tests.
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
public void xtestLeakedTransactionLogging() throws Exception
|
||||
{
|
||||
assertFalse(SpringAwareUserTransaction.isCallStackTraced());
|
||||
|
||||
TrxThread t1 = new TrxThread();
|
||||
t1.start();
|
||||
System.gc();
|
||||
Thread.sleep(1000);
|
||||
|
||||
TrxThread t2 = new TrxThread();
|
||||
t2.start();
|
||||
System.gc();
|
||||
Thread.sleep(1000);
|
||||
|
||||
assertTrue(SpringAwareUserTransaction.isCallStackTraced());
|
||||
|
||||
TrxThread t3 = new TrxThread();
|
||||
t3.start();
|
||||
System.gc();
|
||||
Thread.sleep(3000);
|
||||
System.gc();
|
||||
Thread.sleep(3000);
|
||||
}
|
||||
|
||||
private class TrxThread extends Thread
|
||||
{
|
||||
public void run()
|
||||
{
|
||||
try
|
||||
{
|
||||
getTrx();
|
||||
}
|
||||
catch (Exception e) {}
|
||||
}
|
||||
|
||||
public void getTrx() throws Exception
|
||||
{
|
||||
UserTransaction txn = getTxn();
|
||||
txn.begin();
|
||||
txn = null;
|
||||
}
|
||||
}
|
||||
|
||||
public void testConnectionPoolException() throws Exception
|
||||
{
|
||||
testNoTxnStatus();
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
@@ -118,18 +118,6 @@
|
||||
<groupId>org.jibx</groupId>
|
||||
<artifactId>jibx-run</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<exclusions>
|
||||
<!-- [ACS-5371] Excluded to avoid conflict in JDK9+ as it includes javax.xml-->
|
||||
<exclusion>
|
||||
<groupId>xpp3</groupId>
|
||||
<artifactId>xpp3</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.sf.kxml</groupId>
|
||||
<artifactId>kxml2</artifactId>
|
||||
<version>${dependency.kxml2.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2024 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2024 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
|
@@ -9,6 +9,6 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
</project>
|
||||
|
@@ -37,7 +37,6 @@ commons-fileupload http://jakarta.apache.org/commons/
|
||||
commons-httpclient http://jakarta.apache.org/commons/
|
||||
commons-io http://jakarta.apache.org/commons/
|
||||
commons-jxpath http://jakarta.apache.org/commons/
|
||||
commons-lang http://jakarta.apache.org/commons/
|
||||
commons-lang3 http://jakarta.apache.org/commons/
|
||||
commons-logging http://jakarta.apache.org/commons/
|
||||
commons-net http://jakarta.apache.org/commons/
|
||||
@@ -146,7 +145,6 @@ libgif http://giflib.sourceforge.net/
|
||||
libfreetype http://www.freetype.org/
|
||||
PostgreSQL http://www.postgresql.org/
|
||||
PostgreSQL JDBC Driver http://www.postgresql.org/
|
||||
kXML 2 http://kxml.sourceforge.net/
|
||||
|
||||
|
||||
=== CDDL 1.0 ===
|
||||
|
@@ -1,5 +1,5 @@
|
||||
# More infos about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
|
||||
FROM alfresco/alfresco-base-tomcat:tomcat10-jre17-rockylinux9@sha256:9622418e142fb4fe1c5320666ad61ea292bc5c98f3dd0b550b6add33d18f659f
|
||||
FROM alfresco/alfresco-base-tomcat:tomcat10-jre17-rockylinux9@sha256:395664f9d9be0c9f73d3b722a58fd559ee7231609b263dfe19502617652740e3
|
||||
|
||||
# Set default docker_context.
|
||||
ARG resource_path=target
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -1,3 +1,3 @@
|
||||
SOLR6_TAG=2.0.15
|
||||
POSTGRES_TAG=16.6
|
||||
SOLR6_TAG=2.0.13
|
||||
POSTGRES_TAG=15.4
|
||||
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8
|
||||
|
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<organization>
|
||||
|
@@ -16,7 +16,7 @@ import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.apache.chemistry.opencmis.commons.exceptions.CmisObjectNotFoundException;
|
||||
import org.apache.chemistry.opencmis.commons.exceptions.CmisPermissionDeniedException;
|
||||
import org.apache.chemistry.opencmis.commons.exceptions.CmisUnauthorizedException;
|
||||
import org.apache.commons.lang.time.DateUtils;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>25.1.2.1</version>
|
||||
<version>23.6.0.21</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
@@ -17,7 +17,7 @@
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<rest.api.explorer.branch>master</rest.api.explorer.branch>
|
||||
<httpclient-osgi-version>4.5.6</httpclient-osgi-version>
|
||||
<commons-lang3.version>3.17.0</commons-lang3.version>
|
||||
<commons-lang3.version>3.18.0</commons-lang3.version>
|
||||
<scribejava-apis.version>8.3.3</scribejava-apis.version>
|
||||
<java.version>17</java.version>
|
||||
</properties>
|
||||
@@ -171,7 +171,7 @@
|
||||
<dependency>
|
||||
<groupId>org.codehaus.groovy</groupId>
|
||||
<artifactId>groovy</artifactId>
|
||||
<version>3.0.23</version>
|
||||
<version>3.0.22</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.codehaus.groovy/groovy-json-->
|
||||
|
@@ -2,7 +2,7 @@
 * #%L
 * alfresco-tas-restapi
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * Copyright (C) 2005 - 2024 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of