Compare commits

...

91 Commits

Author SHA1 Message Date
Giovanni Toraldo
a99e5cdde9 split failing job in two separate jobs 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
7bc6dd142b test core without build-cache but latest maven 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
6cbd2d7d50 [skip ci] remove push branch 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
1786cbec99 add the new input [db] 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
e16a53a23e rebuild 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
21fb85f2a9 drop debug 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
7dfd41997a ssh debug 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
6374aa30d6 Revert "build with maven daemon"
This reverts commit 7f70aa3d61f72a5538346329736805e4eabc4782.
2023-10-31 12:20:15 +01:00
Giovanni Toraldo
ed8bdc87da build with maven daemon 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
5b8d758947 build 2023-10-31 12:20:14 +01:00
Giovanni Toraldo
ed3f170d05 use custom setup-java-build 2023-10-31 12:20:14 +01:00
alfresco-build
8d51e9885a [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-30 18:33:51 +00:00
alfresco-build
a61faaece5 [maven-release-plugin][skip ci] prepare release 23.1.0.255 2023-10-30 18:33:47 +00:00
Maciej Pichura
30de66257f ACS-6143: Bump api-explorer version [skip tests] (#2279)
`UpdateRecordsTests` failure is unrelated.
2023-10-30 19:29:24 +01:00
Maciej Pichura
ae1f955cc2 ACS-6234: Temporarily disabling flaky test. (#2280)
UpdateRecordsTests failure is intermittent and unrelated.
2023-10-30 18:31:13 +01:00
rrajoria
452db9a963 Update googledrive and AOS GA-Version 2023-10-30 19:16:26 +05:30
mstrankowski
4eeb7feb74 ACS-6094: Update Transform Core and Transform Service versions 2023-10-30 14:11:51 +01:00
alfresco-build
5aa8c37c53 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-29 00:06:33 +00:00
alfresco-build
aafdd3c46a [maven-release-plugin][skip ci] prepare release 23.1.0.254 2023-10-29 00:06:30 +00:00
Alfresco CI User
f73cf70cbb [force] Force release for 2023-10-29. 2023-10-29 00:03:13 +00:00
alfresco-build
dd0e0626bd [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-23 10:05:09 +00:00
alfresco-build
e4a5c1a38e [maven-release-plugin][skip ci] prepare release 23.1.0.253 2023-10-23 10:05:07 +00:00
rrajoria
c698ed1d6d Update google drive and AOS Version 2023-10-23 14:20:00 +05:30
alfresco-build
2632486e0f [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-22 00:07:16 +00:00
alfresco-build
ea24992b57 [maven-release-plugin][skip ci] prepare release 23.1.0.252 2023-10-22 00:07:14 +00:00
Alfresco CI User
e6b35b7f66 [force] Force release for 2023-10-22. 2023-10-22 00:03:20 +00:00
alfresco-build
0aa5fd7f59 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-21 09:26:41 +00:00
alfresco-build
55862fc394 [maven-release-plugin][skip ci] prepare release 23.1.0.251 2023-10-21 09:26:39 +00:00
mstrankowski
f942c7b9df Update Transform Core to 5.0.0-A5, Transform Service to 4.0.0-A12 2023-10-21 10:41:39 +02:00
alfresco-build
585111602f [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-20 20:16:20 +00:00
alfresco-build
473942f3ba [maven-release-plugin][skip ci] prepare release 23.1.0.250 2023-10-20 20:16:17 +00:00
Maciej Pichura
99905d349b ACS-6142: update api-explorer to 23.1.0-A2 (#2262) 2023-10-20 21:29:33 +02:00
alfresco-build
4f1efa183c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-20 19:23:38 +00:00
alfresco-build
141c5f3b34 [maven-release-plugin][skip ci] prepare release 23.1.0.249 2023-10-20 19:23:35 +00:00
Krystian Dabrowski
7c863be25e ACS-6075: Highlight snippet control parameters (#2261)
- added snippetCount, fragmentSize and mergeContiguous to highlighting field model
2023-10-20 15:52:46 +02:00
alfresco-build
ad6354bd32 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-18 13:27:37 +00:00
alfresco-build
30e191a8cd [maven-release-plugin][skip ci] prepare release 23.1.0.248 2023-10-18 13:27:34 +00:00
dependabot[bot]
b2bcfd72c1 Bump org.json:json from 20230618 to 20231013 (#2252)
Bumps [org.json:json](https://github.com/douglascrockford/JSON-java) from 20230618 to 20231013.
- [Release notes](https://github.com/douglascrockford/JSON-java/releases)
- [Changelog](https://github.com/stleary/JSON-java/blob/master/docs/RELEASES.md)
- [Commits](https://github.com/douglascrockford/JSON-java/commits)

---
updated-dependencies:
- dependency-name: org.json:json
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-18 14:35:09 +02:00
alfresco-build
698ca01778 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-16 12:23:19 +00:00
alfresco-build
e16a0820ee [maven-release-plugin][skip ci] prepare release 23.1.0.247 2023-10-16 12:23:16 +00:00
Manish Kumar
1f99216d37 [MNT-23933] Fixed Null Pointer Exception (#2253) 2023-10-16 16:44:43 +05:30
alfresco-build
3c60415ea0 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-15 00:08:05 +00:00
alfresco-build
e749ac6478 [maven-release-plugin][skip ci] prepare release 23.1.0.246 2023-10-15 00:08:02 +00:00
Alfresco CI User
6cdcf7928a [force] Force release for 2023-10-15. 2023-10-15 00:03:18 +00:00
alfresco-build
542230764d [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 13:32:06 +00:00
alfresco-build
878cd3ceee [maven-release-plugin][skip ci] prepare release 23.1.0.245 2023-10-13 13:32:03 +00:00
Piotr Żurek
582fc8ec2d ACS-6121 MNT-24007 Use issuer URI from the IdP (#2250) 2023-10-13 14:48:35 +02:00
alfresco-build
53c99a0ba4 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 12:14:08 +00:00
alfresco-build
07cd283a1e [maven-release-plugin][skip ci] prepare release 23.1.0.244 2023-10-13 12:14:06 +00:00
Domenico Sibilio
f7a4da0ba5 Free up GHA runner disk space [db] (#2249) 2023-10-13 13:28:07 +02:00
alfresco-build
c344f7ab1a [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 10:19:34 +00:00
alfresco-build
afe100097e [maven-release-plugin][skip ci] prepare release 23.1.0.243 2023-10-13 10:19:31 +00:00
Manish Kumar
2cc0137be3 [MNT-23896] run handleClientAuth only when this.enforce variable is true (#2248) 2023-10-13 14:25:49 +05:30
alfresco-build
3e91bf6739 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-12 13:23:31 +00:00
alfresco-build
4b77b77013 [maven-release-plugin][skip ci] prepare release 23.1.0.242 2023-10-12 13:23:28 +00:00
Kacper Magdziarz
7a84e4d5f1 [ACS-6116] Leftovers removal (#2246) 2023-10-12 14:37:56 +02:00
alfresco-build
eedb601320 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-12 11:23:11 +00:00
alfresco-build
536ac35aab [maven-release-plugin][skip ci] prepare release 23.1.0.241 2023-10-12 11:23:07 +00:00
Krystian Dabrowski
a61d5a407e ACS-6070: Support for highlighting prefix and postfix (#2245)
* ACS-6070: Support for highlighting prefix and postfix
- added prefix and postfix to field model
2023-10-12 12:01:28 +02:00
alfresco-build
e8c9c9aef5 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-10 11:23:18 +00:00
alfresco-build
6b832aecd1 [maven-release-plugin][skip ci] prepare release 23.1.0.240 2023-10-10 11:23:16 +00:00
Marcin Strankowski
eebacd0a5f MNT-23891: Change configuration for UpgradePasswordHashJob to one recommended, it has been tested locally and indeed runs proper code. A wrong class was called for the good parameters given, probably a copy/paste typo. (#2234) 2023-10-10 12:35:55 +02:00
alfresco-build
6eff1e1219 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-09 11:48:26 +00:00
alfresco-build
e52aaa6b8d [maven-release-plugin][skip ci] prepare release 23.1.0.239 2023-10-09 11:48:23 +00:00
Piotr Żurek
fb78a5fe41 ACS-6103 Upgrade PostgreSQL version (#2233) 2023-10-09 13:01:35 +02:00
alfresco-build
7b4c420f3e [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-09 09:52:01 +00:00
alfresco-build
507a617c51 [maven-release-plugin][skip ci] prepare release 23.1.0.238 2023-10-09 09:51:57 +00:00
Manish Kumar
637cdd4f3b [ACS-4778] Added usr:user nodeType for exclusion in property file (#2232) 2023-10-09 14:39:18 +05:30
alfresco-build
8959db9017 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-08 00:07:00 +00:00
alfresco-build
9032e1cd69 [maven-release-plugin][skip ci] prepare release 23.1.0.237 2023-10-08 00:06:57 +00:00
Alfresco CI User
32f33c04b2 [force] Force release for 2023-10-08. 2023-10-08 00:03:18 +00:00
alfresco-build
ce3b4f5f0c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-06 14:21:47 +00:00
alfresco-build
9fd4efcec7 [maven-release-plugin][skip ci] prepare release 23.1.0.236 2023-10-06 14:21:45 +00:00
Krystian Dabrowski
395d7ded57 ACS-5471: Secondary path support (#2213)
* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support
- fixed test method name

* ACS-5471: Secondary path support
- trying to fix failing on CI DispositionScheduleLinkedRecordsTest

* ACS-5471: Secondary path support
- trying to fix failing on CI DispositionScheduleLinkedRecordsTest

* ACS-5471: Secondary path support
- trying to fix failing CI due to DispositionScheduleLinkedRecordsTest

* ACS-5471: Test adjustment to follow same behavior as introduced by ACS-5325

* ACS-5471: Fixing docker issues

---------

Co-authored-by: mpichura <maciej.pichura@hyland.com>
2023-10-06 15:36:46 +02:00
alfresco-build
c157780dcb [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-04 09:43:37 +00:00
alfresco-build
63be57cafe [maven-release-plugin][skip ci] prepare release 23.1.0.235 2023-10-04 09:43:33 +00:00
Wojtek Świętoń
64dad4fc89 ACS-5830 Bump alfresco-transform-service to latest 4.0.0-A3 2023-10-04 10:56:31 +02:00
alfresco-build
2f7db5f0ee [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-03 12:40:44 +00:00
alfresco-build
61ff6dafe8 [maven-release-plugin][skip ci] prepare release 23.1.0.234 2023-10-03 12:40:40 +00:00
Damian Ujma
456adc2aa2 ACS-5325 Invoke required policy (#2215)
---------

Co-authored-by: Domenico Sibilio <domenicosibilio@gmail.com>
2023-10-03 13:28:15 +02:00
HylandAditya
daf573e24a Merge pull request #2221 from Alfresco/dependabot/maven/commons-io-commons-io-2.14.0
Bump commons-io:commons-io from 2.13.0 to 2.14.0
2023-10-03 14:17:25 +05:30
alfresco-build
d46fbdcf4c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-01 00:07:27 +00:00
alfresco-build
70f3982b56 [maven-release-plugin][skip ci] prepare release 23.1.0.233 2023-10-01 00:07:24 +00:00
Alfresco CI User
196817cd77 [force] Force release for 2023-10-01. 2023-10-01 00:03:21 +00:00
dependabot[bot]
58b0075a68 Bump commons-io:commons-io from 2.13.0 to 2.14.0
Bumps commons-io:commons-io from 2.13.0 to 2.14.0.

---
updated-dependencies:
- dependency-name: commons-io:commons-io
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-29 22:02:29 +00:00
alfresco-build
d616226918 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-29 14:21:51 +00:00
alfresco-build
c3dadf6bbf [maven-release-plugin][skip ci] prepare release 23.1.0.232 2023-09-29 14:21:48 +00:00
Krystian Dabrowski
a973e17a86 ACS-6073: DispositionScheduleLinkedRecordsTest - tests fail over and over again (#2220)
* ACS-6073: DispositionScheduleLinkedRecordsTest - tests suddenly started to fail over and over again
- disabling failing tests
2023-09-29 15:33:19 +02:00
Wojtek Świętoń
86d22ccd8e ACS-5830 Bump alfresco-transform-core.version to 5.0.0-A3 2023-09-29 12:59:39 +02:00
Tom Page
e0a1defb80 Update ya-pmd-scan to 2.0.5. 2023-09-28 10:32:34 +01:00
alfresco-build
eebd110c34 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-27 13:08:20 +00:00
53 changed files with 1029 additions and 370 deletions

View File

@@ -36,8 +36,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare maven cache and check compilation"
@@ -55,11 +58,12 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v1.35.2
continue-on-error: true
with:
srcclr-api-token: ${{ secrets.SRCCLR_API_TOKEN }}
@@ -76,10 +80,10 @@ jobs:
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: Alfresco/ya-pmd-scan@v2.0.4
- uses: Alfresco/ya-pmd-scan@v2.0.5
all_unit_tests_suite:
name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
core_datamodel_tests:
name: "Core, Data-Model - Build and test"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -88,14 +92,37 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run tests"
run: |
mvn -B test -pl core,data-model -am -DfailIfNoTests=false
mvn -B test -pl "repository,mmt" -am "-Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite" -DfailIfNoTests=false
run: mvn -B test -pl core,data-model -am -DfailIfNoTests=false
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
all_unit_tests_suite:
name: "Repository - AllUnitTestsSuite - Build and test"
runs-on: ubuntu-latest
needs: [prepare]
if: >
!contains(github.event.head_commit.message, '[skip repo]') &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run tests"
run: mvn -B test -pl repository,mmt -am -Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite -DfailIfNoTests=false
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
@@ -125,8 +152,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -155,11 +185,14 @@ jobs:
strategy:
fail-fast: false
matrix:
version: ['10.2.18', '10.4', '10.5']
version: ["10.2.18", "10.4", "10.5"]
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: Run MariaDB ${{ matrix.version }} database
@@ -184,8 +217,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.6 database"
@@ -210,8 +246,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MySQL 8 database"
@@ -223,8 +262,8 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_13_7_tests:
name: "Repository - PostgreSQL 13.7 tests"
repository_postgresql_13_12_tests:
name: "Repository - PostgreSQL 13.12 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -235,21 +274,52 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 13.7 database"
- name: "Run PostgreSQL 13.12 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 13.7
POSTGRES_VERSION: 13.12
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_14_4_tests:
name: "Repository - PostgreSQL 14.4 tests"
repository_postgresql_14_9_tests:
name: "Repository - PostgreSQL 14.9 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
(((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) && github.event_name != 'pull_request' &&
!contains(github.event.head_commit.message, '[skip db]')) ||
contains(github.event.head_commit.message, '[db]')) &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.9 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 14.9
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_15_4_tests:
name: "Repository - PostgreSQL 15.4 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -260,14 +330,17 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.4 database"
- name: "Run PostgreSQL 15.4 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 14.4
POSTGRES_VERSION: 15.4
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
@@ -283,8 +356,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run ActiveMQ"
@@ -325,16 +401,19 @@ jobs:
compose-profile: with-transform-core-aio
- testSuite: SearchTestSuite
compose-profile: default
mvn-options: '-Dindex.subsystem.name=solr6'
mvn-options: "-Dindex.subsystem.name=solr6"
- testSuite: MTLSTestSuite
compose-profile: with-mtls-transform-core-aio
mtls: true
disabledHostnameVerification: false
mvn-options: '-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore'
mvn-options: "-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore"
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Set transformers tag"
@@ -403,8 +482,11 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -440,11 +522,14 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 14.4 database"
- name: "Run Postgres 15.4 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile postgres up -d
- name: "Run tests"
run: mvn -B test -pl :alfresco-share-services -am -Dtest=ShareServicesTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
@@ -469,8 +554,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -500,8 +588,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -527,8 +618,11 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -563,7 +657,7 @@ jobs:
ags_start_api_explorer:
name: "Test Tomcat deployment of api explorer"
runs-on: ubuntu-latest
needs: [ prepare ]
needs: [prepare]
if: >
(((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request' ) &&
!contains(github.event.head_commit.message, '[skip ags]')) ||
@@ -572,8 +666,11 @@ jobs:
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |

View File

@@ -34,11 +34,12 @@ jobs:
- uses: actions/checkout@v3
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.35.2
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.35.2
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -62,11 +63,12 @@ jobs:
- uses: actions/checkout@v3
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.35.2
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.35.2
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<build>

View File

@@ -135,7 +135,7 @@ public class DispositionScheduleLinkedRecordsTest extends BaseRMRestTest {
* <p>
* <p/> TestRail Test C775<p/>
**/
@Test
@Test(enabled = false) // temporary disabled, see ACS-6073
@AlfrescoTest(jira = "RM-1622")
public void dispositionScheduleLinkedRecords() throws UnsupportedEncodingException {
STEP("Create record category");
@@ -202,7 +202,7 @@ public class DispositionScheduleLinkedRecordsTest extends BaseRMRestTest {
* Check the disposition steps for a record can be executed
* When the record is linked to a folder with the same disposition schedule
* */
@Test
@Test(enabled = false) // temporary disabled, see ACS-6073
@AlfrescoTest (jira = "RM-3060")
public void sameDispositionScheduleLinkedRecords() throws UnsupportedEncodingException {

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.8.1
POSTGRES_TAG=14.4
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.2-jre17-rockylinux8

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>
@@ -416,9 +416,7 @@
<configuration>
<images>
<image>
<!-- TODO upgrade this old postgres version -->
<name>postgres:9.4.12</name>
<!--<name>postgres:13.3</name>-->
<name>postgres:15.4</name>
<run>
<ports>
<port>${postgresql.tests.port}:${postgresql.port}</port>

View File

@@ -61,6 +61,7 @@ public class RFC822MetadataExtracter extends org.alfresco.repo.content.metadata.
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
super.setNodeService(nodeService);
}
/**

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -74,8 +74,6 @@ public abstract class X509ServletFilterBase implements Filter
logger.debug("Initializing X509ServletFilter");
}
this.handleClientAuth();
this.enforce = checkEnforce(config.getServletContext());
if(logger.isDebugEnabled())
@@ -85,6 +83,8 @@ public abstract class X509ServletFilterBase implements Filter
if (this.enforce)
{
this.handleClientAuth();
/*
* We are enforcing so get the cert-contains string.
*/

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
</project>

View File

@@ -98,4 +98,4 @@ EXPOSE 10001
# For remote debug
EXPOSE 8000
USER ${IMAGEUSERNAME}
USER ${IMAGEUSERNAME}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.8.1
POSTGRES_TAG=14.4
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.2-jre17-rockylinux8

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<organization>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -35,6 +35,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.stream.Stream;
import io.restassured.http.ContentType;
import org.alfresco.rest.core.JsonBodyGenerator;
@@ -51,9 +52,11 @@ import org.alfresco.rest.model.RestCommentModelsCollection;
import org.alfresco.rest.model.RestNodeAssocTargetModel;
import org.alfresco.rest.model.RestNodeAssociationModel;
import org.alfresco.rest.model.RestNodeAssociationModelCollection;
import org.alfresco.rest.model.RestNodeAssociationTypeModel;
import org.alfresco.rest.model.RestNodeBodyModel;
import org.alfresco.rest.model.RestNodeBodyMoveCopyModel;
import org.alfresco.rest.model.RestNodeChildAssocModelCollection;
import org.alfresco.rest.model.RestNodeChildAssociationModel;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.model.RestNodeModelsCollection;
import org.alfresco.rest.model.RestRatingModel;
@@ -72,6 +75,7 @@ import org.alfresco.rest.model.body.RestNodeLockBodyModel;
import org.alfresco.rest.model.builder.NodesBuilder;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.RepoTestModel;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.testng.reporters.Files;
@@ -824,25 +828,118 @@ public class Node extends ModelRequest<Node>
}
/**
* Create secondary children association using POST call 'nodes/{nodeId}/secondary-children
* Use a list of secondary children nodes
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @return a collection of nodes
* @param secondaryChild - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssocModelCollection createSecondaryChildren(String secondaryChildren)
public RestNodeChildAssociationModel addSecondaryChild(RepoTestModel secondaryChild)
{
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, secondaryChildren, "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return addSecondaryChild("cm:contains", secondaryChild);
}
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(String associationType, RepoTestModel secondaryChild)
{
return addSecondaryChild(new RestNodeChildAssociationModel(secondaryChild.getNodeRef(), associationType));
}
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildAssociation - node's secondary parent-child association model
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(RestNodeChildAssociationModel secondaryChildAssociation)
{
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, secondaryChildAssociation.toJson(), "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return restWrapper.processModel(RestNodeChildAssociationModel.class, request);
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildren - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RepoTestModel... secondaryChildren)
{
return addSecondaryChildren("cm:contains", secondaryChildren);
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChildren - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(String associationType, RepoTestModel... secondaryChildren)
{
return addSecondaryChildren(Stream.of(secondaryChildren)
.map(child -> new RestNodeChildAssociationModel(child.getNodeRef(), associationType))
.toArray(RestNodeChildAssociationModel[]::new));
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildrenAssociations - node's secondary parent-child association models
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RestNodeChildAssociationModel... secondaryChildrenAssociations)
{
String requestBody = arrayToJson(Stream.of(secondaryChildrenAssociations).toList());
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, requestBody, "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return restWrapper.processModels(RestNodeChildAssocModelCollection.class, request);
}
/**
* Delete secondary children using DELETE call 'nodes/{nodeId}/secondary-children/{childId}
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @return a collection of nodes
* @param secondaryChild - node, which should NOT be a secondary child anymore
*/
public void deleteSecondaryChild(RestNodeAssociationModel child)
public void removeSecondaryChild(RepoTestModel secondaryChild)
{
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/secondary-children/{childId}?{parameters}", repoModel.getNodeRef(), child.getId(), restWrapper.getParameters());
removeSecondaryChild(null, secondaryChild);
}
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should NOT be a secondary child anymore
*/
public void removeSecondaryChild(String associationType, RepoTestModel secondaryChild)
{
RestNodeAssociationModel associationModel = new RestNodeAssociationModel();
RestNodeAssociationTypeModel associationTypeModel = new RestNodeAssociationTypeModel();
if (associationType != null)
{
associationTypeModel.setAssocType(associationType);
}
associationModel.setAssociation(associationTypeModel);
associationModel.setId(secondaryChild.getNodeRef());
removeSecondaryChild(associationModel);
}
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param secondaryChildAssociation - node's secondary parent-child association to remove
*/
public void removeSecondaryChild(RestNodeAssociationModel secondaryChildAssociation)
{
String parameters = StringUtils.isNotEmpty(secondaryChildAssociation.getAssociation().getAssocType()) ?
"assocType=" + secondaryChildAssociation.getAssociation().getAssocType() + "&" + restWrapper.getParameters() :
restWrapper.getParameters();
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/secondary-children/{childId}?{parameters}", repoModel.getNodeRef(), secondaryChildAssociation.getId(), parameters);
restWrapper.processEmptyModel(request);
}
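For orientation, here is a minimal usage sketch of the reworked secondary-children helpers defined above. It is written only against the method signatures added in this diff and the call chain used in NodesParentChildrenTests further down; the RestWrapper and ContentModel types and the pre-created nodes are assumptions supplied by the surrounding TAS test framework, not part of this change.

import org.alfresco.rest.core.RestWrapper;
import org.alfresco.rest.model.RestNodeChildAssocModelCollection;
import org.alfresco.utility.model.ContentModel;
import org.springframework.http.HttpStatus;

public class SecondaryChildrenSketch
{
    // 'restClient' and the content models are assumed to be prepared by the
    // surrounding TAS test setup, as in NodesParentChildrenTests later in this diff.
    public void linkAndUnlink(RestWrapper restClient, ContentModel parent, ContentModel childA, ContentModel childB)
    {
        // Link two nodes as cm:contains secondary children in one call.
        RestNodeChildAssocModelCollection assocs = restClient.withCoreAPI()
                .usingNode(parent)
                .addSecondaryChildren(childA, childB);
        restClient.assertStatusCodeIs(HttpStatus.CREATED);

        // Undo one of the associations; the overload without an association type
        // sends no assocType filter with the DELETE request.
        restClient.withCoreAPI().usingNode(parent).removeSecondaryChild(childA);
        restClient.assertStatusCodeIs(HttpStatus.NO_CONTENT);
    }
}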

View File

@@ -2,7 +2,7 @@
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,24 +23,6 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* Copyright (C) 2017 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.rest.search;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -55,23 +37,43 @@ import org.alfresco.utility.model.TestModel;
*/
public class RestRequestFieldsModel extends TestModel implements IRestModel<RestRequestFieldsModel>
{
public RestRequestFieldsModel(){}
public RestRequestFieldsModel(String fieldValue)
{
this.field = fieldValue;
}
@JsonProperty(value = "entry")
RestRequestFieldsModel model;
@JsonProperty(required = true)
private String field;
private String prefix;
private String postfix;
private Integer snippetCount;
private Integer fragmentSize;
private Boolean mergeContiguous;
public RestRequestFieldsModel() {
super();
}
public static RestRequestFieldsModel of(String field)
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
return fieldModel;
}
public static RestRequestFieldsModel of(String field, String prefix, String postfix)
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
fieldModel.setPrefix(prefix);
fieldModel.setPostfix(postfix);
return fieldModel;
}
@Override
public RestRequestFieldsModel onModel()
{
return model;
}
@JsonProperty(required = true)
private String field;
public String getField()
{
@@ -82,8 +84,116 @@ public class RestRequestFieldsModel extends TestModel implements IRestModel<Rest
{
this.field = field;
}
public String getPrefix()
{
return prefix;
}
public void setPrefix(String prefix)
{
this.prefix = prefix;
}
public String getPostfix()
{
return postfix;
}
public void setPostfix(String postfix)
{
this.postfix = postfix;
}
public Integer getSnippetCount()
{
return snippetCount;
}
public void setSnippetCount(Integer snippetCount)
{
this.snippetCount = snippetCount;
}
public Integer getFragmentSize()
{
return fragmentSize;
}
public void setFragmentSize(Integer fragmentSize)
{
this.fragmentSize = fragmentSize;
}
public Boolean getMergeContiguous()
{
return mergeContiguous;
}
public void setMergeContiguous(Boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
}
public static Builder builder()
{
return new Builder();
}
public static class Builder
{
private String field;
private String prefix;
private String postfix;
private Integer snippetCount;
private Integer fragmentSize;
private Boolean mergeContiguous;
public Builder field(String field)
{
this.field = field;
return this;
}
public Builder prefix(String prefix)
{
this.prefix = prefix;
return this;
}
public Builder postfix(String postfix)
{
this.postfix = postfix;
return this;
}
public Builder snippetCount(Integer snippetCount)
{
this.snippetCount = snippetCount;
return this;
}
public Builder fragmentSize(Integer fragmentSize)
{
this.fragmentSize = fragmentSize;
return this;
}
public Builder mergeContiguous(Boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
return this;
}
public RestRequestFieldsModel build()
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
fieldModel.setPrefix(prefix);
fieldModel.setPostfix(postfix);
fieldModel.setSnippetCount(snippetCount);
fieldModel.setFragmentSize(fragmentSize);
fieldModel.setMergeContiguous(mergeContiguous);
return fieldModel;
}
}
}
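To make the intent of the new highlight-control fields concrete, here is a small sketch that builds field models with the parameters added for ACS-6070 and ACS-6075. It uses only the factory and builder methods visible in this diff; how the resulting models are attached to a search request (via the fields(...) builder method on RestRequestHighlightModel, shown in the next file) is assumed to follow the existing tests.

import org.alfresco.rest.search.RestRequestFieldsModel;

public class HighlightFieldSketch
{
    public static void main(String[] args)
    {
        // Simple field with custom prefix/postfix markers around matches.
        RestRequestFieldsModel title = RestRequestFieldsModel.of("cm:title", "<b>", "</b>");

        // Full control over snippet generation via the new builder.
        RestRequestFieldsModel content = RestRequestFieldsModel.builder()
                .field("cm:content")
                .prefix("(")
                .postfix(")")
                .snippetCount(3)       // up to three highlight snippets for this field
                .fragmentSize(120)     // characters per snippet
                .mergeContiguous(true) // join adjacent fragments
                .build();

        System.out.println(title.getField() + " / " + content.getSnippetCount());
    }
}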

View File

@@ -25,6 +25,7 @@
*/
package org.alfresco.rest.search;
import java.util.Arrays;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -205,7 +206,13 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public RestRequestHighlightModelBuilder fields(List<String> fields)
{
this.fields = fields.stream().map(field -> new RestRequestFieldsModel(field)).toList();
this.fields = fields.stream().map(RestRequestFieldsModel::of).toList();
return this;
}
public RestRequestHighlightModelBuilder fields(RestRequestFieldsModel... fields)
{
this.fields = Arrays.stream(fields).toList();
return this;
}

View File

@@ -125,11 +125,10 @@ public class NodesParentChildrenTests extends RestTest
RestNodeChildAssociationModel childAssoc1 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f1").getId(), "cm:contains");
RestNodeChildAssociationModel childAssoc2 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f2").getId(), "cm:contains");
RestNodeChildAssociationModel childAssoc3 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f3").getId(), "cm:preferenceImage");
String secondaryChildrenBody = "[" + childAssoc1.toJson() + "," + childAssoc2.toJson() + "," + childAssoc3.toJson() + "]";
STEP("3. Create secondary child associations using POST /nodes/{nodeId}/secondary-children");
RestNodeChildAssocModelCollection secondaryChildAssoc = restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel())
.createSecondaryChildren(secondaryChildrenBody);
.addSecondaryChildren(childAssoc1, childAssoc2, childAssoc3);
restClient.assertStatusCodeIs(HttpStatus.CREATED);
secondaryChildAssoc.getEntryByIndex(0).assertThat().field("childId").is(childAssoc1.getChildId());
secondaryChildAssoc.getEntryByIndex(1).assertThat().field("childId").is(childAssoc2.getChildId());
@@ -142,7 +141,7 @@ public class NodesParentChildrenTests extends RestTest
secondaryChildren.assertThat().entriesListCountIs(2);
STEP("5. Check using DELETE /nodes/{nodeId}/secondary-children/{childId} that a secondary child can be deleted");
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).deleteSecondaryChild(secondaryChildren.getEntryByIndex(0));
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).removeSecondaryChild(secondaryChildren.getEntryByIndex(0));
restClient.assertStatusCodeIs(HttpStatus.NO_CONTENT);
STEP("6. Check using GET /nodes/{nodeId}/secondary-children that a secondary child association was deleted");
@@ -182,7 +181,7 @@ public class NodesParentChildrenTests extends RestTest
STEP("2. Create secondary child associations using POST /nodes/{nodeId}/secondary-children");
RestNodeChildAssociationModel childAssoc = new RestNodeChildAssociationModel(nodesBuilder.getNode("f1").getId(), "cm:contains");
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).createSecondaryChildren(childAssoc.toJson());
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).addSecondaryChild(childAssoc);
restClient.assertStatusCodeIs(HttpStatus.CREATED);
STEP("3. Get all parents for file 'f1' - both primary and secondary");

View File

@@ -11,6 +11,7 @@ import org.alfresco.utility.testrail.ExecutionType;
import org.alfresco.utility.testrail.annotation.TestRail;
import org.springframework.http.HttpStatus;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Ignore;
import org.testng.annotations.Test;
import java.util.List;
@@ -47,8 +48,9 @@ public class GetProcessesCoreTests extends RestTest
@TestRail(section = { TestGroup.REST_API, TestGroup.WORKFLOW,TestGroup.PROCESSES }, executionType = ExecutionType.REGRESSION,
description = "Verify user gets all processes started by him ordered descending by id")
@Test(groups = { TestGroup.REST_API, TestGroup.WORKFLOW, TestGroup.PROCESSES, TestGroup.REGRESSION })
public void getProcessesOrderedByIdDESC() throws Exception
@Test(groups = { TestGroup.REST_API, TestGroup.WORKFLOW, TestGroup.PROCESSES, TestGroup.REGRESSION }, enabled = false)
@Ignore("Until ACS-6234 is done")
public void getProcessesOrderedByIdDESC()
{
RestProcessModelsCollection processes = restClient.authenticateUser(userWhoStartsTask).withParams("orderBy=id DESC")
.withWorkflowAPI().getProcesses();

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -51,10 +51,10 @@
<dependency.alfresco-server-root.version>7.0.1</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-service.version>4.0.0-A1</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.0.0-A2</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-core.version>5.0.0</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.0.0</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.0</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.23</dependency.acs-event-model.version>
<dependency.acs-event-model.version>0.0.24</dependency.acs-event-model.version>
<dependency.aspectj.version>1.9.20.1</dependency.aspectj.version>
<dependency.spring.version>6.0.12</dependency.spring.version>
@@ -67,9 +67,9 @@
<dependency.bouncycastle.version>1.76</dependency.bouncycastle.version>
<dependency.mockito-core.version>5.4.0</dependency.mockito-core.version>
<dependency.assertj.version>3.24.2</dependency.assertj.version>
<dependency.org-json.version>20230618</dependency.org-json.version>
<dependency.org-json.version>20231013</dependency.org-json.version>
<dependency.commons-dbcp.version>2.9.0</dependency.commons-dbcp.version>
<dependency.commons-io.version>2.13.0</dependency.commons-io.version>
<dependency.commons-io.version>2.14.0</dependency.commons-io.version>
<dependency.gson.version>2.10.1</dependency.gson.version>
<dependency.guava.version>32.1.2-jre</dependency.guava.version>
<dependency.httpclient.version>4.5.14</dependency.httpclient.version>
@@ -100,7 +100,6 @@
<dependency.jakarta-ee-jaxb-api.version>4.0.0</dependency.jakarta-ee-jaxb-api.version>
<dependency.jakarta-ee-jaxb-impl.version>4.0.3</dependency.jakarta-ee-jaxb-impl.version>
<dependency.java-ee-jaxb-api.version>2.3.3</dependency.java-ee-jaxb-api.version>
<dependency.jakarta-ws-api.version>3.0.1</dependency.jakarta-ws-api.version>
<dependency.jakarta-soap-api.version>2.0.1</dependency.jakarta-soap-api.version>
<dependency.jakarta-annotation-api.version>2.1.1</dependency.jakarta-annotation-api.version>
@@ -114,9 +113,9 @@
<dependency.jakarta-ee-json-impl.version>1.1.4</dependency.jakarta-ee-json-impl.version>
<dependency.jakarta-json-path.version>2.8.0</dependency.jakarta-json-path.version>
<dependency.json-smart.version>2.5.0</dependency.json-smart.version>
<alfresco.googledrive.version>4.0.0-M1</alfresco.googledrive.version>
<alfresco.aos-module.version>2.0.0-M1</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.1.0-A1</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.googledrive.version>4.0.0</alfresco.googledrive.version>
<alfresco.aos-module.version>2.0.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.1.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
<license-maven-plugin.version>2.0.1</license-maven-plugin.version>
@@ -153,7 +152,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>23.1.0.231</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>
@@ -216,12 +215,6 @@
<version>${dependency.jakarta-jws-api.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.sun.mail</groupId>-->
<!-- <artifactId>javax.mail</artifactId>-->
<!-- <version>${dependency.java-ee-mail.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>jakarta.mail</artifactId>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.231</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>


@@ -311,12 +311,24 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
public void onCreateChildAssociation(ChildAssociationRef childAssociationRef, boolean isNewNode)
{
getEventConsolidator(childAssociationRef).onCreateChildAssociation(childAssociationRef, isNewNode);
if (!childAssociationRef.isPrimary())
{
// if this is a secondary relationship simulate node move event to store state of previous secondary parents
ChildAssociationRef oldChildAssociationRef = childAssociationWithoutParentOf(childAssociationRef);
getEventConsolidator(childAssociationRef.getChildRef()).onMoveNode(oldChildAssociationRef, childAssociationRef);
}
}
@Override
public void beforeDeleteChildAssociation(ChildAssociationRef childAssociationRef)
{
getEventConsolidator(childAssociationRef).beforeDeleteChildAssociation(childAssociationRef);
if (!childAssociationRef.isPrimary())
{
// if this is a secondary relationship simulate node move event to store state of previous secondary parents
ChildAssociationRef newChildAssociationRef = childAssociationWithoutParentOf(childAssociationRef);
getEventConsolidator(childAssociationRef.getChildRef()).onMoveNode(childAssociationRef, newChildAssociationRef);
}
}
@Override
@@ -503,6 +515,18 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
return ZonedDateTime.ofInstant(commitTimeMs, ZoneOffset.UTC);
}
private static ChildAssociationRef childAssociationWithoutParentOf(ChildAssociationRef childAssociationRef)
{
return new ChildAssociationRef(
null,
null,
childAssociationRef.getQName(),
childAssociationRef.getChildRef(),
childAssociationRef.isPrimary(),
childAssociationRef.getNthSibling()
);
}
@Override
protected void onBootstrap(ApplicationEvent applicationEvent)
{


@@ -65,6 +65,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
private QName nodeType;
private QName nodeTypeBefore;
private List<String> primaryHierarchyBefore;
private List<String> secondaryParentsBefore;
private boolean resourceBeforeAllFieldsNull = true;
public NodeEventConsolidator(NodeResourceHelper nodeResourceHelper)
@@ -144,7 +145,25 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
eventTypes.add(EventType.NODE_UPDATED);
createBuilderIfAbsent(newChildAssocRef.getChildRef());
setBeforePrimaryHierarchy(helper.getPrimaryHierarchy(oldChildAssocRef.getParentRef(), true));
if (newChildAssocRef.isPrimary())
{
setBeforePrimaryHierarchy(helper.getPrimaryHierarchy(oldChildAssocRef.getParentRef(), true));
}
else
{
List<String> secondaryParents = helper.getSecondaryParents(newChildAssocRef.getChildRef());
if (newChildAssocRef.getParentRef() != null)
{
// on create secondary child association event takes place - recreate secondary parents previous state
secondaryParents.remove(newChildAssocRef.getParentRef().getId());
}
else if(oldChildAssocRef.getParentRef() != null && !secondaryParents.contains(oldChildAssocRef.getParentRef().getId()))
{
// before remove secondary child association event takes place - recreate secondary parents previous state
secondaryParents.add(oldChildAssocRef.getParentRef().getId());
}
setSecondaryParentsBefore(secondaryParents);
}
}
@Override
@@ -174,7 +193,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
public void beforeDeleteNode(NodeRef nodeRef)
{
eventTypes.add(EventType.NODE_DELETED);
createBuilderIfAbsent(nodeRef, false);
createBuilderIfAbsent(nodeRef);
}
@Override
@@ -240,6 +259,19 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
}
}
private void setSecondaryParentsBefore(List<String> secondaryParents)
{
if (this.secondaryParentsBefore == null)
{
this.secondaryParentsBefore = secondaryParents;
}
}
List<String> getSecondaryParentsBefore()
{
return secondaryParentsBefore;
}
private NodeResource buildNodeResource()
{
if (resourceBuilder == null)
@@ -283,7 +315,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
resourceBeforeAllFieldsNull = false;
}
Map<String, Map<String, String>> localizedProps =helper.getLocalizedPropertiesBefore(changedPropsBefore, after);
Map<String, Map<String, String>> localizedProps = helper.getLocalizedPropertiesBefore(changedPropsBefore, after);
if (!localizedProps.isEmpty())
{
builder.setLocalizedProperties(localizedProps);
@@ -309,8 +341,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
builder.setModifiedByUser(modifier);
resourceBeforeAllFieldsNull = false;
}
modifiedAt =
helper.getZonedDateTime((Date) changedPropsBefore.get(ContentModel.PROP_MODIFIED));
modifiedAt = helper.getZonedDateTime((Date) changedPropsBefore.get(ContentModel.PROP_MODIFIED));
}
// Handle case where the content does not exist on the propertiesBefore
@@ -334,6 +365,12 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
resourceBeforeAllFieldsNull = false;
}
if (secondaryParentsBefore != null)
{
builder.setSecondaryParents(secondaryParentsBefore);
resourceBeforeAllFieldsNull = false;
}
if (nodeTypeBefore != null)
{
builder.setNodeType(helper.getQNamePrefixString(nodeTypeBefore));

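With secondaryParentsBefore now captured, a node.Updated event exposes the secondary-parent list both on the resource and on resourceBefore, so a consumer can work out which secondary parent was gained or lost. A minimal plain-Java sketch of that comparison, with illustrative IDs standing in for the values read from the event payload (not part of this change set):

import java.util.ArrayList;
import java.util.List;

public class SecondaryParentsDiff
{
    public static void main(String[] args)
    {
        // Illustrative stand-ins for resourceBefore.getSecondaryParents() and resource.getSecondaryParents()
        List<String> before = List.of("parent-a");
        List<String> after = List.of("parent-a", "parent-b");

        List<String> added = new ArrayList<>(after);
        added.removeAll(before);     // secondary parents gained by this update
        List<String> removed = new ArrayList<>(before);
        removed.removeAll(after);    // secondary parents lost by this update

        System.out.println("added=" + added + ", removed=" + removed);
    }
}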

@@ -26,6 +26,7 @@
package org.alfresco.repo.event2;
import static java.util.Optional.ofNullable;
import static java.util.function.Predicate.not;
import java.io.Serializable;
import java.time.ZoneId;
@@ -38,6 +39,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
@@ -145,21 +147,23 @@ public class NodeResourceHelper implements InitializingBean
// minor: save one lookup if creator & modifier are the same
Map<String, UserInfo> mapUserCache = new HashMap<>(2);
return NodeResource.builder().setId(nodeRef.getId())
.setName((String) properties.get(ContentModel.PROP_NAME))
.setNodeType(getQNamePrefixString(type))
.setIsFile(isSubClass(type, ContentModel.TYPE_CONTENT))
.setIsFolder(isSubClass(type, ContentModel.TYPE_FOLDER))
.setCreatedByUser(getUserInfo((String) properties.get(ContentModel.PROP_CREATOR), mapUserCache))
.setCreatedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_CREATED)))
.setModifiedByUser(getUserInfo((String) properties.get(ContentModel.PROP_MODIFIER), mapUserCache))
.setModifiedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_MODIFIED)))
.setContent(getContentInfo(properties))
.setPrimaryAssocQName(getPrimaryAssocQName(nodeRef))
.setPrimaryHierarchy(PathUtil.getNodeIdsInReverse(path, false))
.setProperties(mapToNodeProperties(properties))
.setLocalizedProperties(mapToNodeLocalizedProperties(properties))
.setAspectNames(getMappedAspects(nodeRef));
return NodeResource.builder()
.setId(nodeRef.getId())
.setName((String) properties.get(ContentModel.PROP_NAME))
.setNodeType(getQNamePrefixString(type))
.setIsFile(isSubClass(type, ContentModel.TYPE_CONTENT))
.setIsFolder(isSubClass(type, ContentModel.TYPE_FOLDER))
.setCreatedByUser(getUserInfo((String) properties.get(ContentModel.PROP_CREATOR), mapUserCache))
.setCreatedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_CREATED)))
.setModifiedByUser(getUserInfo((String) properties.get(ContentModel.PROP_MODIFIER), mapUserCache))
.setModifiedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_MODIFIED)))
.setContent(getContentInfo(properties))
.setPrimaryAssocQName(getPrimaryAssocQName(nodeRef))
.setPrimaryHierarchy(PathUtil.getNodeIdsInReverse(path, false))
.setProperties(mapToNodeProperties(properties))
.setLocalizedProperties(mapToNodeLocalizedProperties(properties))
.setAspectNames(getMappedAspects(nodeRef))
.setSecondaryParents(getSecondaryParents(nodeRef));
}
private boolean isSubClass(QName className, QName ofClassQName)
@@ -413,6 +417,21 @@ public class NodeResourceHelper implements InitializingBean
return PathUtil.getNodeIdsInReverse(path, showLeaf);
}
/**
* Gathers node's secondary parents.
*
* @param nodeRef - node reference
* @return a list of node's secondary parents.
*/
public List<String> getSecondaryParents(final NodeRef nodeRef)
{
return nodeService.getParentAssocs(nodeRef).stream()
.filter(not(ChildAssociationRef::isPrimary))
.map(ChildAssociationRef::getParentRef)
.map(NodeRef::getId)
.collect(Collectors.toList());
}
public PermissionService getPermissionService()
{
return permissionService;


@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -48,19 +48,31 @@ public class NodePropertyFilter extends AbstractNodeEventFilter
ContentModel.PROP_CREATOR,
ContentModel.PROP_CREATED,
ContentModel.PROP_CONTENT);
// These properties should not be excluded from the properties object
private static final Set<QName> ALLOWED_PROPERTIES = Set.of(ContentModel.PROP_CASCADE_TX,
ContentModel.PROP_CASCADE_CRC);
private final List<String> nodeAspectsBlackList;
private final List<String> nodePropertiesBlackList;
public NodePropertyFilter()
{
this.nodeAspectsBlackList = parseFilterList(FILTERED_PROPERTIES);
this.nodePropertiesBlackList = parseFilterList(FILTERED_PROPERTIES);
}
@Override
public Set<QName> getExcludedTypes()
{
Set<QName> result = new HashSet<>(EXCLUDED_TOP_LEVEL_PROPS);
nodeAspectsBlackList.forEach(nodeAspect -> result.addAll(expandTypeDef(nodeAspect)));
nodePropertiesBlackList.forEach(nodeProperty-> result.addAll(expandTypeDef(nodeProperty)));
return result;
}
@Override
public boolean isExcluded(QName qName)
{
if(qName != null && ALLOWED_PROPERTIES.contains(qName)){
return false;
}
return super.isExcluded(qName);
}
}

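The new isExcluded override gives the allow-list priority: a property listed in ALLOWED_PROPERTIES is reported as not excluded even when the inherited exclusion rules would otherwise catch it. A self-contained sketch of that precedence, using plain strings instead of QNames (all names below are illustrative only):

import java.util.Set;

public class AllowListPrecedence
{
    private static final Set<String> ALLOWED = Set.of("ns:cascadeTx", "ns:cascadeCRC");
    private static final Set<String> EXCLUDED_PREFIXES = Set.of("ns:");

    static boolean isExcluded(String property)
    {
        if (ALLOWED.contains(property))
        {
            return false;   // the allow-list wins over any exclusion rule
        }
        return EXCLUDED_PREFIXES.stream().anyMatch(property::startsWith);
    }

    public static void main(String[] args)
    {
        System.out.println(isExcluded("ns:cascadeTx"));      // false - explicitly allowed
        System.out.println(isExcluded("ns:somethingElse"));  // true  - caught by the ns:* rule
    }
}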

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -3203,13 +3203,16 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
// Invoke policy behaviour
invokeBeforeUpdateNode(parentNodeRef);
Map<QName, Serializable> propertiesBefore = nodeDAO.getNodeProperties(parentNodeId);
// Touch the node; it is cm:auditable
boolean changed = nodeDAO.setModifiedProperties(parentNodeId, modifiedDate, modifiedByToPropagate);
if (changed)
{
Map<QName, Serializable> propertiesAfter = nodeDAO.getNodeProperties(parentNodeId);
// Invoke policy behaviour
invokeOnUpdateNode(parentNodeRef);
invokeOnUpdateProperties(parentNodeRef, propertiesBefore, propertiesAfter);
}
return null;

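With this hunk, the cm:auditable touch that DbNodeServiceImpl propagates to the parent node also raises the onUpdateProperties policy, passing the property maps captured before and after the change. A minimal sketch of a class behaviour that would observe it, assuming the standard Alfresco policy-binding API; the listener class and its wiring are illustrative and not part of this change set:

import java.io.Serializable;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.node.NodeServicePolicies;
import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;

public class AuditablePropagationListener implements NodeServicePolicies.OnUpdatePropertiesPolicy
{
    private final PolicyComponent policyComponent;

    public AuditablePropagationListener(PolicyComponent policyComponent)
    {
        this.policyComponent = policyComponent;
    }

    public void init()
    {
        // Bind to cm:folder updates; the propagated cm:modified/cm:modifier change now arrives here too
        policyComponent.bindClassBehaviour(
                NodeServicePolicies.OnUpdatePropertiesPolicy.QNAME,
                ContentModel.TYPE_FOLDER,
                new JavaBehaviour(this, "onUpdateProperties"));
    }

    @Override
    public void onUpdateProperties(NodeRef nodeRef, Map<QName, Serializable> before, Map<QName, Serializable> after)
    {
        Serializable modifiedBefore = before.get(ContentModel.PROP_MODIFIED);
        Serializable modifiedAfter = after.get(ContentModel.PROP_MODIFIED);
        if (modifiedBefore != null && !modifiedBefore.equals(modifiedAfter))
        {
            // react to the propagated modification-date change
        }
    }
}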

@@ -57,6 +57,7 @@ import com.nimbusds.jose.proc.JWSVerificationKeySelector;
import com.nimbusds.jose.proc.SecurityContext;
import com.nimbusds.jose.util.ResourceRetriever;
import com.nimbusds.jwt.proc.ConfigurableJWTProcessor;
import com.nimbusds.oauth2.sdk.id.Issuer;
import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacade.IdentityServiceFacadeException;
@@ -91,7 +92,9 @@ import org.springframework.security.oauth2.core.ClientAuthenticationMethod;
import org.springframework.security.oauth2.core.DelegatingOAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2AuthenticationException;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2ErrorCodes;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.core.converter.ClaimTypeConverter;
import org.springframework.security.oauth2.core.http.converter.OAuth2AccessTokenResponseHttpMessageConverter;
import org.springframework.security.oauth2.jose.jws.SignatureAlgorithm;
@@ -99,7 +102,6 @@ import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;
import org.springframework.security.oauth2.jwt.JwtClaimValidator;
import org.springframework.security.oauth2.jwt.JwtDecoder;
import org.springframework.security.oauth2.jwt.JwtIssuerValidator;
import org.springframework.security.oauth2.jwt.JwtTimestampValidator;
import org.springframework.security.oauth2.jwt.NimbusJwtDecoder;
import org.springframework.web.client.RestOperations;
@@ -375,12 +377,18 @@ public class IdentityServiceFacadeFactoryBean implements FactoryBean<IdentitySer
.map(OIDCProviderMetadata::getAuthorizationEndpointURI)
.map(URI::toASCIIString)
.orElse(null);
final String issuerUri = Optional.of(metadata)
.map(OIDCProviderMetadata::getIssuer)
.map(Issuer::getValue)
.orElseGet(config::getIssuerUrl);
return ClientRegistration
.withRegistrationId("ids")
.authorizationUri(authUri)
.tokenUri(metadata.getTokenEndpointURI().toASCIIString())
.jwkSetUri(metadata.getJWKSetURI().toASCIIString())
.issuerUri(config.getIssuerUrl())
.issuerUri(issuerUri)
.authorizationGrantType(AuthorizationGrantType.PASSWORD);
}
@@ -565,6 +573,34 @@ public class IdentityServiceFacadeFactoryBean implements FactoryBean<IdentitySer
}
}
static class JwtIssuerValidator implements OAuth2TokenValidator<Jwt>
{
private final String requiredIssuer;
public JwtIssuerValidator(String issuer)
{
this.requiredIssuer = requireNonNull(issuer, "issuer cannot be null");
}
@Override
public OAuth2TokenValidatorResult validate(Jwt token)
{
requireNonNull(token, "token cannot be null");
final Object issuer = token.getClaim(JwtClaimNames.ISS);
if (issuer != null && requiredIssuer.equals(issuer.toString()))
{
return OAuth2TokenValidatorResult.success();
}
final OAuth2Error error = new OAuth2Error(
OAuth2ErrorCodes.INVALID_TOKEN,
"The iss claim is not valid. Expected `%s` but got `%s`.".formatted(requiredIssuer, issuer),
"https://tools.ietf.org/html/rfc6750#section-3.1");
return OAuth2TokenValidatorResult.failure(error);
}
}
private static boolean isDefined(String value)
{
return value != null && !value.isBlank();

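The JwtIssuerValidator added above plugs into the standard Spring Security token-validation chain. A minimal sketch of how such a validator could be combined with timestamp validation on a NimbusJwtDecoder; the wiring shown is illustrative, and the real decoder configuration inside the factory is not part of this hunk:

import org.springframework.security.oauth2.core.DelegatingOAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtTimestampValidator;
import org.springframework.security.oauth2.jwt.NimbusJwtDecoder;

class JwtDecoderSketch
{
    // JwtIssuerValidator refers to the static class introduced in the change above
    static NimbusJwtDecoder buildDecoder(String jwkSetUri, String expectedIssuer)
    {
        NimbusJwtDecoder decoder = NimbusJwtDecoder.withJwkSetUri(jwkSetUri).build();
        // Reject tokens that are expired/not yet valid or that carry an unexpected iss claim
        OAuth2TokenValidator<Jwt> validator = new DelegatingOAuth2TokenValidator<>(
                new JwtTimestampValidator(),
                new JwtIssuerValidator(expectedIssuer));
        decoder.setJwtValidator(validator);
        return decoder;
    }
}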

@@ -1221,7 +1221,7 @@ contentPropertyRestrictions.whitelist=
repo.event2.enabled=true
# Type and aspect filters which should be excluded
# Note: System folders node types are added by default
repo.event2.filter.nodeTypes=sys:*, fm:*, cm:thumbnail, cm:failedThumbnail, cm:rating, rma:rmsite include_subtypes
repo.event2.filter.nodeTypes=sys:*, fm:*, cm:thumbnail, cm:failedThumbnail, cm:rating, rma:rmsite include_subtypes, usr:user
repo.event2.filter.nodeAspects=sys:*
repo.event2.filter.childAssocTypes=rn:rendition
# Comma separated list of users which should be excluded
@@ -1231,6 +1231,7 @@ repo.event2.filter.users=
repo.event2.topic.endpoint=amqp:topic:alfresco.repo.event2
# Specifies if messages should be enqueued in in-memory queue or sent directly to the topic
repo.event2.queue.skip=false
#repo.event2.topic.endpoint=amqp:topic:VirtualTopic.alfresco.repo.event2
# Thread pool for async enqueue of repo events
repo.event2.queue.enqueueThreadPool.priority=1
repo.event2.queue.enqueueThreadPool.coreSize=8


@@ -187,7 +187,7 @@
<property name="startDelay" value="${system.cronJob.startDelayMilliseconds}"/>
<property name="jobDetail">
<bean id="upgradePasswordHashJobDetail" class="org.springframework.scheduling.quartz.JobDetailFactoryBean">
<property name="jobClass" value="org.alfresco.repo.admin.patch.AsynchronousPatch$AsynchronousPatchJob"/>
<property name="jobClass" value="org.alfresco.repo.security.authentication.UpgradePasswordHashWorker$UpgradePasswordHashJob"/>
<property name="jobDataAsMap">
<map>
<entry key="upgradePasswordHashWorker" value-ref="upgradePasswordHashWorker"/>


@@ -158,7 +158,7 @@ public class AccessAuditorTest
{
Object[] args = invocation.getArguments();
Map<String, Serializable> auditMap = (Map<String, Serializable>)args[1];
if ("/alfresco-access/transaction".equals(args[0]))
if ("/alfresco-access/transaction".equals(args[0]) && !"updateNodeProperties".equals(auditMap.get("action")))
{
auditMapList.add(auditMap);
}


@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -28,6 +28,7 @@ package org.alfresco.repo.event2;
import java.util.Arrays;
import java.util.List;
import java.util.stream.IntStream;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.event.v1.model.ChildAssociationResource;
@@ -64,12 +65,11 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(),
resultRepoEvent.getType());
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(
parentNodeRef,
childNodeRef,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE, assocLocalName)));
retryingTransactionHelper.doInTransaction(() -> nodeService.addChild(
parentNodeRef,
childNodeRef,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE, assocLocalName)));
List<ChildAssociationRef> childAssociationRefs = retryingTransactionHelper.doInTransaction(() ->
nodeService.getChildAssocs(parentNodeRef));
@@ -77,10 +77,32 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(3);
checkNumOfEvents(4);
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getRepoEventWithoutWait(3);
// node event
final RepoEvent<EventData<NodeResource>> nodeRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), nodeRepoEvent.getType());
assertNotNull("Repo event ID is not available.", nodeRepoEvent.getId());
assertNotNull("Source is not available", nodeRepoEvent.getSource());
assertEquals("Repo event source is not available.",
"/" + descriptorService.getCurrentRepositoryDescriptor().getId(),
nodeRepoEvent.getSource().toString());
assertNotNull("Repo event creation time is not available.", nodeRepoEvent.getTime());
assertEquals("Invalid repo event datacontenttype", "application/json",
nodeRepoEvent.getDatacontenttype());
assertNotNull(nodeRepoEvent.getDataschema());
assertEquals(EventJSONSchema.NODE_UPDATED_V1.getSchema(), nodeRepoEvent.getDataschema());
final EventData<NodeResource> nodeResourceEventData = getEventData(nodeRepoEvent);
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNotNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final NodeResource nodeResource = getNodeResource(nodeRepoEvent);
final NodeResource nodeResourceBefore = getNodeResourceBefore(nodeRepoEvent);
assertNotSame("Secondary parents actual and earlier state should differ", nodeResource.getSecondaryParents(), nodeResourceBefore.getSecondaryParents());
// child association event
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getFilteredEvent(EventType.CHILD_ASSOC_CREATED, 0);
assertEquals("Wrong repo event type.", EventType.CHILD_ASSOC_CREATED.getType(), childAssocRepoEvent.getType());
assertNotNull("Repo event ID is not available.", childAssocRepoEvent.getId());
assertNotNull("Source is not available", childAssocRepoEvent.getSource());
@@ -93,16 +115,18 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(childAssocRepoEvent.getDataschema());
assertEquals(EventJSONSchema.CHILD_ASSOC_CREATED_V1.getSchema(), childAssocRepoEvent.getDataschema());
final EventData<ChildAssociationResource> nodeResourceEventData = getEventData(childAssocRepoEvent);
// EventData attributes
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final EventData<ChildAssociationResource> childAssocResourceEventData = getEventData(childAssocRepoEvent);
assertNotNull("Event data group ID is not available. ", childAssocResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", childAssocResourceEventData.getResourceBefore());
final ChildAssociationResource childAssociationResource = getChildAssocResource(childAssocRepoEvent);
assertEquals("Wrong parent", parentNodeRef.getId(), childAssociationResource.getParent().getId());
assertEquals("Wrong child", childNodeRef.getId(), childAssociationResource.getChild().getId());
assertEquals("Wrong assoc type", "cm:contains", childAssociationResource.getAssocType());
assertEquals("Wrong assoc name", "ce:" + assocLocalName, childAssociationResource.getAssocQName());
assertEquals("Node and child association events should have same eventGroupId", nodeResourceEventData.getEventGroupId(), childAssocResourceEventData.getEventGroupId());
assertTrue("Wrong node's secondary parents", nodeResource.getSecondaryParents().contains(childAssociationResource.getParent().getId()));
}
@Test
@@ -131,7 +155,7 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(3);
checkNumOfEvents(4);
retryingTransactionHelper.doInTransaction(() ->
nodeService.removeChildAssociation(childAssociationRef));
@@ -141,10 +165,32 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(0, childAssociationRefs.size());
checkNumOfEvents(4);
checkNumOfEvents(6);
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getRepoEventWithoutWait(4);
// node repo event
final RepoEvent<EventData<NodeResource>> nodeRepoEvent = getRepoEventWithoutWait(5);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), nodeRepoEvent.getType());
assertNotNull("Repo event ID is not available.", nodeRepoEvent.getId());
assertNotNull("Source is not available", nodeRepoEvent.getSource());
assertEquals("Repo event source is not available.",
"/" + descriptorService.getCurrentRepositoryDescriptor().getId(),
nodeRepoEvent.getSource().toString());
assertNotNull("Repo event creation time is not available.", nodeRepoEvent.getTime());
assertEquals("Invalid repo event datacontenttype", "application/json",
nodeRepoEvent.getDatacontenttype());
assertNotNull(nodeRepoEvent.getDataschema());
assertEquals(EventJSONSchema.NODE_UPDATED_V1.getSchema(), nodeRepoEvent.getDataschema());
final EventData<NodeResource> nodeResourceEventData = getEventData(nodeRepoEvent);
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNotNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final NodeResource nodeResource = getNodeResource(nodeRepoEvent);
final NodeResource nodeResourceBefore = getNodeResourceBefore(nodeRepoEvent);
assertNotSame("Secondary parents actual and earlier state should differ", nodeResource.getSecondaryParents(), nodeResourceBefore.getSecondaryParents());
// child association repo event
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getFilteredEvent(EventType.CHILD_ASSOC_DELETED, 0);
assertEquals("Wrong repo event type.", EventType.CHILD_ASSOC_DELETED.getType(), childAssocRepoEvent.getType());
assertNotNull("Repo event ID is not available. ", childAssocRepoEvent.getId());
assertNotNull("Source is not available", childAssocRepoEvent.getSource());
@@ -156,15 +202,17 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(childAssocRepoEvent.getDataschema());
assertEquals(EventJSONSchema.CHILD_ASSOC_DELETED_V1.getSchema(), childAssocRepoEvent.getDataschema());
final EventData<ChildAssociationResource> nodeResourceEventData = getEventData(childAssocRepoEvent);
// EventData attributes
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final EventData<ChildAssociationResource> childAssocResourceEventData = getEventData(childAssocRepoEvent);
assertNotNull("Event data group ID is not available. ", childAssocResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", childAssocResourceEventData.getResourceBefore());
final ChildAssociationResource childAssociationResource = getChildAssocResource(childAssocRepoEvent);
assertEquals("Wrong parent", parentNodeRef.getId(), childAssociationResource.getParent().getId());
assertEquals("Wrong child", childNodeRef.getId(), childAssociationResource.getChild().getId());
assertEquals("Wrong assoc type", "cm:contains", childAssociationResource.getAssocType());
assertEquals("Node and child association events should have same eventGroupId", nodeResourceEventData.getEventGroupId(), childAssocResourceEventData.getEventGroupId());
assertTrue("Wrong node's secondary parents", nodeResourceBefore.getSecondaryParents().contains(childAssociationResource.getParent().getId()));
}
@Test
@@ -179,17 +227,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(
@@ -212,12 +253,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
// 3 assoc.child.Created events should be created
checkNumOfEvents(8);
// 1 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvent = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong association events number", 1, nodeUpdateEvent.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -231,17 +275,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
List<NodeRef> parents = Arrays.asList(parent1NodeRef, parent2NodeRef, parent3NodeRef);
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef parent : parents)
@@ -268,10 +305,14 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(8);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(5);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
// All events in the transaction should have the same eventGroupId
String assocEventGroupID1 = getEventData(childAssocEvents.get(0)).getEventGroupId();
@@ -294,17 +335,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
for (NodeRef parent : parents)
{
@@ -330,10 +364,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
assertEquals(parent1NodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getParent().getId());
assertEquals(childNodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getChild().getId());
@@ -360,17 +399,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef child : children)
@@ -388,10 +420,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -406,17 +443,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
for (NodeRef child : children)
{
@@ -432,10 +462,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
assertEquals(parentNodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getParent().getId());
assertEquals(child1NodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getChild().getId());
@@ -462,17 +497,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -501,15 +529,19 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(10);
checkNumOfEvents(12);
// 2 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 2, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
public void testDeleteAssociationOneParentMultipleChildrenDifferentTransactions()
public void testDeleteAssociationMultipleParentOneChildrenDifferentTransactions()
{
final NodeRef parent1NodeRef = createNode(ContentModel.TYPE_FOLDER);
final NodeRef parent2NodeRef = createNode(ContentModel.TYPE_FOLDER);
@@ -520,17 +552,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -557,7 +582,7 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
nodeService.removeChildAssociation(childAssociationRef));
}
checkNumOfEvents(10);
checkNumOfEvents(14);
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
@@ -588,17 +613,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef child : children)
@@ -619,11 +637,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
deleteNode(parentNodeRef);
checkNumOfEvents(11);
checkNumOfEvents(17);
// 6 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 6, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -638,17 +660,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -670,11 +685,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
deleteNode(childNodeRef);
checkNumOfEvents(11);
checkNumOfEvents(12);
// 2 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 2, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -685,11 +704,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(2);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
{
@@ -708,13 +726,14 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(4);
checkNumOfEvents(5);
// Check the node events occur before the child association event
List<RepoEvent<?>> repoEvents = getRepoEventsContainer().getEvents();
assertEquals("org.alfresco.event.node.Created", repoEvents.get(0).getType());
assertEquals("org.alfresco.event.node.Created", repoEvents.get(1).getType());
assertEquals("org.alfresco.event.node.Updated", repoEvents.get(2).getType());
assertEquals("org.alfresco.event.assoc.child.Created", repoEvents.get(3).getType());
assertEquals(EventType.NODE_CREATED.getType(), repoEvents.get(0).getType());
assertEquals(EventType.NODE_CREATED.getType(), repoEvents.get(1).getType());
assertEquals(EventType.NODE_UPDATED.getType(), repoEvents.get(2).getType());
assertEquals(EventType.NODE_UPDATED.getType(), repoEvents.get(3).getType());
assertEquals(EventType.CHILD_ASSOC_CREATED.getType(), repoEvents.get(4).getType());
}
}


@@ -27,24 +27,38 @@ package org.alfresco.repo.event2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.when;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.event.v1.model.EventType;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.junit.Before;
import org.junit.Test;
public class EventConsolidatorUnitTest
{
private NodeResourceHelper nodeResourceHelper = mock(NodeResourceHelper.class);
private final NodeResourceHelper nodeResourceHelper = mock(NodeResourceHelper.class);
private NodeEventConsolidator eventConsolidator;
@Before
public void setUp() throws Exception
{
eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
}
@Test
public void testGetMappedAspectsBeforeRemovedAndAddedEmpty()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
Set<String> currentAspects = new HashSet<>();
currentAspects.add("cm:geographic");
currentAspects.add("cm:auditable");
@@ -57,7 +71,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemoved()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -79,7 +92,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -102,7 +114,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAddedAndRemoved()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -125,7 +136,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemovedAndAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -150,8 +160,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAddedTwiceRemovedOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -178,8 +186,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemovedTwiceAddedOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -206,7 +212,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_FilteredAspectAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASPECT_COPIEDFROM);
Set<String> currentAspects = new HashSet<>();
@@ -227,7 +232,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
assertEquals(1, eventConsolidator.getAspectsAdded().size());
@@ -238,7 +242,6 @@ public class EventConsolidatorUnitTest
@Test
public void testRemoveAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
assertEquals(0, eventConsolidator.getAspectsAdded().size());
@@ -249,7 +252,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectRemoveAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -260,7 +262,6 @@ public class EventConsolidatorUnitTest
@Test
public void testRemoveAspectAddAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -271,7 +272,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectTwiceRemoveAspectOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -284,7 +284,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectOnceRemoveAspectTwice()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -293,4 +292,83 @@ public class EventConsolidatorUnitTest
assertEquals(1, eventConsolidator.getAspectsRemoved().size());
assertTrue(eventConsolidator.getAspectsRemoved().contains(ContentModel.ASSOC_CONTAINS));
}
@Test
public void testOnMoveNodeWithPrimaryParent()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
given(newAssociationMock.isPrimary()).willReturn(true);
given(oldAssociationMock.getParentRef()).willReturn(parentRefMock);
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().getChildRef();
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(nodeResourceHelper).should().getPrimaryHierarchy(parentRefMock, true);
assertTrue("Node event consolidator should contain event type: UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
}
@Test
public void testOnMoveNodeAfterSecondaryParentAdded()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
List<String> secondaryParentsMock = mock(List.class);
given(newAssociationMock.isPrimary()).willReturn(false);
given(newAssociationMock.getChildRef()).willReturn(nodeRefMock);
given(newAssociationMock.getParentRef()).willReturn(parentRefMock);
given(parentRefMock.getId()).willReturn("parent-id");
given(nodeResourceHelper.getSecondaryParents(any(NodeRef.class))).willReturn(secondaryParentsMock);
// when
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).should(times(2)).getChildRef();
then(newAssociationMock).should(times(2)).getParentRef();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(oldAssociationMock).shouldHaveNoInteractions();
then(nodeResourceHelper).should().getSecondaryParents(nodeRefMock);
then(secondaryParentsMock).should().remove("parent-id");
then(secondaryParentsMock).shouldHaveNoMoreInteractions();
assertTrue("Node event consolidator should contain event type: UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
assertEquals(secondaryParentsMock, eventConsolidator.getSecondaryParentsBefore());
}
@Test
public void testOnMoveNodeBeforeSecondaryParentRemoved()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
List<String> secondaryParentsMock = mock(List.class);
given(newAssociationMock.isPrimary()).willReturn(false);
given(newAssociationMock.getChildRef()).willReturn(nodeRefMock);
given(oldAssociationMock.getParentRef()).willReturn(parentRefMock);
given(parentRefMock.getId()).willReturn("parent-id");
given(nodeResourceHelper.getSecondaryParents(any(NodeRef.class))).willReturn(secondaryParentsMock);
// when
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).should(times(2)).getChildRef();
then(newAssociationMock).should().getParentRef();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(oldAssociationMock).should(times(3)).getParentRef();
then(oldAssociationMock).shouldHaveNoMoreInteractions();
then(nodeResourceHelper).should().getSecondaryParents(nodeRefMock);
then(secondaryParentsMock).should().contains("parent-id");
then(secondaryParentsMock).should().add("parent-id");
then(secondaryParentsMock).shouldHaveNoMoreInteractions();
assertTrue("Node event consolidator should contain event type: NODE_UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
assertEquals(secondaryParentsMock, eventConsolidator.getSecondaryParentsBefore());
}
}


@@ -119,6 +119,9 @@ public class EventFilterUnitTest
assertTrue("System properties are excluded by default.",
propertyFilter.isExcluded(ContentModel.PROP_NODE_DBID));
assertFalse("Property cascadeTx is not excluded", propertyFilter.isExcluded(ContentModel.PROP_CASCADE_TX));
assertFalse("Property cascadeCRC is not excluded", propertyFilter.isExcluded(ContentModel.PROP_CASCADE_CRC));
assertFalse(propertyFilter.isExcluded(ContentModel.PROP_TITLE));
}


@@ -28,14 +28,43 @@ package org.alfresco.repo.event2;
import static org.alfresco.repo.event2.NodeResourceHelper.getLocalizedPropertiesBefore;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.mock;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class NodeResourceHelperUnitTest
{
@Mock
private NodeService nodeServiceMock;
@InjectMocks
private NodeResourceHelper nodeResourceHelper;
@Before
public void setUp() throws Exception
{
MockitoAnnotations.openMocks(this);
}
@Test
public void shouldExtractOnlyRelevantPropertiesForBeforeNode()
{
@@ -111,4 +140,39 @@ public class NodeResourceHelperUnitTest
return this;
}
}
@Test
public void testGetSecondaryParents()
{
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
ChildAssociationRef secondaryParentMock = mock(ChildAssociationRef.class);
given(nodeServiceMock.getParentAssocs(any(NodeRef.class))).willReturn(List.of(secondaryParentMock));
given(secondaryParentMock.isPrimary()).willReturn(false);
given(secondaryParentMock.getParentRef()).willReturn(parentRefMock);
// when
List<String> secondaryParents = nodeResourceHelper.getSecondaryParents(nodeRefMock);
then(nodeServiceMock).should().getParentAssocs(nodeRefMock);
then(nodeServiceMock).shouldHaveNoMoreInteractions();
then(secondaryParentMock).should().isPrimary();
then(secondaryParentMock).should().getParentRef();
then(secondaryParentMock).shouldHaveNoMoreInteractions();
then(parentRefMock).should().getId();
then(parentRefMock).shouldHaveNoMoreInteractions();
assertNotNull(secondaryParents);
}
@Test
public void testGetNoneSecondaryParents()
{
NodeRef nodeRefMock = mock(NodeRef.class);
// when
List<String> secondaryParents = nodeResourceHelper.getSecondaryParents(nodeRefMock);
assertNotNull(secondaryParents);
assertTrue(secondaryParents.isEmpty());
}
}
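Taken together, the two tests above pin down the expected behaviour of getSecondaryParents: return the ids of all non-primary parent associations, and an empty list when there are none. A minimal, self-contained sketch consistent with that contract (not necessarily the real NodeResourceHelper implementation) could be:

import java.util.List;
import java.util.stream.Collectors;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

// Minimal sketch consistent with the two tests above; the real helper may differ.
class SecondaryParentsSketch
{
    private final NodeService nodeService;

    SecondaryParentsSketch(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    // Returns the ids of all non-primary (secondary) parents; empty list when there are none.
    List<String> getSecondaryParents(NodeRef nodeRef)
    {
        return nodeService.getParentAssocs(nodeRef).stream()
                .filter(assoc -> !assoc.isPrimary())
                .map(assoc -> assoc.getParentRef().getId())
                .collect(Collectors.toList());
    }
}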


@@ -87,7 +87,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(2);
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
resultRepoEvent.getType());
@@ -227,7 +227,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(2);
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
@@ -625,7 +625,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
// Create active model
CustomModelDefinition modelDefinition =
retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
assertNotNull(modelDefinition);
assertEquals(modelName, modelDefinition.getName().getLocalName());
@@ -635,8 +635,11 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
Collection<TypeDefinition> types = modelDefinition.getTypeDefinitions();
assertEquals(1, types.size());
// we should have only 2 events, node.Created and node.Updated
checkNumOfEvents(2);
// node.Created event should be generated for the model
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getFilteredEvent(EventType.NODE_CREATED, 0);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
NodeResource nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Incorrect node type was found", "cm:dictionaryModel", nodeResource.getNodeType());
@@ -647,9 +650,9 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(ContentModel.TYPE_CONTENT, nodeService.getType(nodeRef));
// node.Created event should be generated
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEvent(3);
nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
assertEquals("cm:content node type was not found", "cm:content", nodeResource.getNodeType());
QName typeQName = QName.createQName("{" + namespacePair.getFirst()+ "}" + typeName);
@@ -661,15 +664,15 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
// we should have 3 events, node.Created for the model, node.Created for the node and node.Updated
checkNumOfEvents(3);
// we should have 4 events, node.Created for the model, node.Updated for the parent, node.Created for the node and node.Updated
checkNumOfEvents(4);
resultRepoEvent = getRepoEvent(3);
resultRepoEvent = getRepoEvent(4);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Incorrect node type was found", namespacePair.getSecond() + QName.NAMESPACE_PREFIX + typeName, nodeResource.getNodeType());
NodeResource resourceBefore = getNodeResourceBefore(3);
NodeResource resourceBefore = getNodeResourceBefore(4);
assertEquals("Incorrect node type was found", "cm:content", resourceBefore.getNodeType());
assertNull(resourceBefore.getId());
assertNull(resourceBefore.getContent());
@@ -788,7 +791,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(4);
NodeResource resourceBefore = getNodeResourceBefore(4);
NodeResource resource = getNodeResource(4);
@@ -808,7 +811,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertNull(resourceBefore.getModifiedByUser());
assertNull(resourceBefore.getCreatedAt());
assertNull(resourceBefore.getCreatedByUser());
assertNull(resourceBefore.getProperties());
assertNotNull(resourceBefore.getProperties());
assertNull(resourceBefore.getAspectNames());
assertNotNull(resourceBefore.getPrimaryHierarchy());
assertNull("Content should have been null.", resource.getContent());
@@ -818,7 +821,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(resource.getModifiedByUser());
assertNotNull(resource.getAspectNames());
assertNull(resource.getContent());
assertTrue(resource.getProperties().isEmpty());
assertFalse(resource.getProperties().isEmpty());
}
@Test
@@ -1020,7 +1023,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
NodeResource resource = getNodeResource(1);
final Set<String> originalAspects = resource.getAspectNames();
assertNotNull(originalAspects);
retryingTransactionHelper.doInTransaction(() -> {
// Add cm:geographic aspect with default value
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, null);


@@ -31,15 +31,20 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Map;
import java.util.UUID;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacadeFactoryBean.JwtDecoderProvider;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacadeFactoryBean.JwtIssuerValidator;
import org.junit.Test;
import org.springframework.security.oauth2.client.registration.ClientRegistration.ProviderDetails;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtDecoder;
public class IdentityServiceFacadeFactoryBeanTest
{
private static final String EXPECTED_ISSUER = "expected-issuer";
@Test
public void shouldCreateJwtDecoderWithoutIDSWhenPublicKeyIsProvided()
{
@@ -62,4 +67,53 @@ public class IdentityServiceFacadeFactoryBeanTest
.containsEntry(USERNAME_CLAIM, "piotrek");
}
@Test
public void shouldFailWithNotMatchingIssuerURIs()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer("different-issuer"));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isTrue();
assertThat(validationResult.getErrors()).hasSize(1);
final OAuth2Error error = validationResult.getErrors().iterator().next();
assertThat(error).isNotNull();
assertThat(error.getDescription()).contains(EXPECTED_ISSUER, "different-issuer");
}
@Test
public void shouldFailWithNullIssuerURI()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer(null));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isTrue();
assertThat(validationResult.getErrors()).hasSize(1);
final OAuth2Error error = validationResult.getErrors().iterator().next();
assertThat(error).isNotNull();
assertThat(error.getDescription()).contains(EXPECTED_ISSUER, "null");
}
@Test
public void shouldSucceedWithMatchingIssuerURI()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer(EXPECTED_ISSUER));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isFalse();
assertThat(validationResult.getErrors()).isEmpty();
}
private Jwt tokenWithIssuer(String issuer)
{
return Jwt.withTokenValue(UUID.randomUUID().toString())
.issuer(issuer)
.header("JUST", "FOR TESTING")
.build();
}
}
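The three issuer tests above describe the full contract of the validator: success when the token's iss claim equals the configured issuer, otherwise a single OAuth2Error whose description mentions both the expected and the actual issuer (including the literal "null" when the claim is absent). A minimal OAuth2TokenValidator<Jwt> sketch satisfying that contract, which may differ from the production JwtIssuerValidator, could look like this:

import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;

// Minimal sketch satisfying the three tests above; not necessarily the production JwtIssuerValidator.
class IssuerValidatorSketch implements OAuth2TokenValidator<Jwt>
{
    private final String expectedIssuer;

    IssuerValidatorSketch(String expectedIssuer)
    {
        this.expectedIssuer = expectedIssuer;
    }

    @Override
    public OAuth2TokenValidatorResult validate(Jwt token)
    {
        // Read the raw iss claim so a missing issuer simply becomes null.
        String actualIssuer = token.getClaimAsString(JwtClaimNames.ISS);
        if (expectedIssuer.equals(actualIssuer))
        {
            return OAuth2TokenValidatorResult.success();
        }
        // The failure description mentions both issuers, as asserted in the tests.
        OAuth2Error error = new OAuth2Error("invalid_issuer",
                "Expected issuer `" + expectedIssuer + "` but got `" + actualIssuer + "`", null);
        return OAuth2TokenValidatorResult.failure(error);
    }
}

In production such an issuer check would typically be combined with the default timestamp validators (for example via a DelegatingOAuth2TokenValidator), but the tests above only exercise the issuer comparison.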


@@ -9,7 +9,7 @@ services:
ports:
- "8090:8090"
postgres:
image: postgres:14.4
image: postgres:15.4
profiles: ["default", "with-transform-core-aio", "postgres", "with-mtls-transform-core-aio"]
environment:
- POSTGRES_PASSWORD=alfresco
@@ -56,4 +56,4 @@ services:
CLIENT_SSL_TRUST_STORE: "file:/tengineAIO.truststore"
CLIENT_SSL_TRUST_STORE_PASSWORD: "password"
CLIENT_SSL_TRUST_STORE_TYPE: "JCEKS"