Compare commits

...

178 Commits

Author SHA1 Message Date
Giovanni Toraldo
a99e5cdde9 split failing job in two separate jobs 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
7bc6dd142b test core without build-cache but latest maven 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
6cbd2d7d50 [skip ci] remove push branch 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
1786cbec99 add the new input [db] 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
e16a53a23e rebuild 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
21fb85f2a9 drop debug 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
7dfd41997a ssh debug 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
6374aa30d6 Revert "build with maven daemon"
This reverts commit 7f70aa3d61f72a5538346329736805e4eabc4782.
2023-10-31 12:20:15 +01:00
Giovanni Toraldo
ed8bdc87da build with maven daemon 2023-10-31 12:20:15 +01:00
Giovanni Toraldo
5b8d758947 build 2023-10-31 12:20:14 +01:00
Giovanni Toraldo
ed3f170d05 use custom setup-java-build 2023-10-31 12:20:14 +01:00
alfresco-build
8d51e9885a [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-30 18:33:51 +00:00
alfresco-build
a61faaece5 [maven-release-plugin][skip ci] prepare release 23.1.0.255 2023-10-30 18:33:47 +00:00
Maciej Pichura
30de66257f ACS-6143: Bump api-explorer version [skip tests] (#2279)
`UpdateRecordsTests` failure is unrelated.
2023-10-30 19:29:24 +01:00
Maciej Pichura
ae1f955cc2 ACS-6234: Temporarily disabling flaky test. (#2280)
UpdateRecordsTests failure is intermittent and unrelated.
2023-10-30 18:31:13 +01:00
rrajoria
452db9a963 Update googledrive and AOS GA-Version 2023-10-30 19:16:26 +05:30
mstrankowski
4eeb7feb74 ACS-6094: Update Transform Core and Transform Service versions 2023-10-30 14:11:51 +01:00
alfresco-build
5aa8c37c53 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-29 00:06:33 +00:00
alfresco-build
aafdd3c46a [maven-release-plugin][skip ci] prepare release 23.1.0.254 2023-10-29 00:06:30 +00:00
Alfresco CI User
f73cf70cbb [force] Force release for 2023-10-29. 2023-10-29 00:03:13 +00:00
alfresco-build
dd0e0626bd [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-23 10:05:09 +00:00
alfresco-build
e4a5c1a38e [maven-release-plugin][skip ci] prepare release 23.1.0.253 2023-10-23 10:05:07 +00:00
rrajoria
c698ed1d6d Update google drive and AOS Version 2023-10-23 14:20:00 +05:30
alfresco-build
2632486e0f [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-22 00:07:16 +00:00
alfresco-build
ea24992b57 [maven-release-plugin][skip ci] prepare release 23.1.0.252 2023-10-22 00:07:14 +00:00
Alfresco CI User
e6b35b7f66 [force] Force release for 2023-10-22. 2023-10-22 00:03:20 +00:00
alfresco-build
0aa5fd7f59 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-21 09:26:41 +00:00
alfresco-build
55862fc394 [maven-release-plugin][skip ci] prepare release 23.1.0.251 2023-10-21 09:26:39 +00:00
mstrankowski
f942c7b9df Update Transform Core to 5.0.0-A5, Transform Service to 4.0.0-A12 2023-10-21 10:41:39 +02:00
alfresco-build
585111602f [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-20 20:16:20 +00:00
alfresco-build
473942f3ba [maven-release-plugin][skip ci] prepare release 23.1.0.250 2023-10-20 20:16:17 +00:00
Maciej Pichura
99905d349b ACS-6142: update api-explorer to 23.1.0-A2 (#2262) 2023-10-20 21:29:33 +02:00
alfresco-build
4f1efa183c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-20 19:23:38 +00:00
alfresco-build
141c5f3b34 [maven-release-plugin][skip ci] prepare release 23.1.0.249 2023-10-20 19:23:35 +00:00
Krystian Dabrowski
7c863be25e ACS-6075: Highlight snippet control parameters (#2261)
- added snippetCount, fragmentSize and mergeContiguous to highlighting field model
2023-10-20 15:52:46 +02:00
alfresco-build
ad6354bd32 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-18 13:27:37 +00:00
alfresco-build
30e191a8cd [maven-release-plugin][skip ci] prepare release 23.1.0.248 2023-10-18 13:27:34 +00:00
dependabot[bot]
b2bcfd72c1 Bump org.json:json from 20230618 to 20231013 (#2252)
Bumps [org.json:json](https://github.com/douglascrockford/JSON-java) from 20230618 to 20231013.
- [Release notes](https://github.com/douglascrockford/JSON-java/releases)
- [Changelog](https://github.com/stleary/JSON-java/blob/master/docs/RELEASES.md)
- [Commits](https://github.com/douglascrockford/JSON-java/commits)

---
updated-dependencies:
- dependency-name: org.json:json
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-18 14:35:09 +02:00
alfresco-build
698ca01778 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-16 12:23:19 +00:00
alfresco-build
e16a0820ee [maven-release-plugin][skip ci] prepare release 23.1.0.247 2023-10-16 12:23:16 +00:00
Manish Kumar
1f99216d37 [MNT-23933] Fixed Null Pointer Exception (#2253) 2023-10-16 16:44:43 +05:30
alfresco-build
3c60415ea0 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-15 00:08:05 +00:00
alfresco-build
e749ac6478 [maven-release-plugin][skip ci] prepare release 23.1.0.246 2023-10-15 00:08:02 +00:00
Alfresco CI User
6cdcf7928a [force] Force release for 2023-10-15. 2023-10-15 00:03:18 +00:00
alfresco-build
542230764d [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 13:32:06 +00:00
alfresco-build
878cd3ceee [maven-release-plugin][skip ci] prepare release 23.1.0.245 2023-10-13 13:32:03 +00:00
Piotr Żurek
582fc8ec2d ACS-6121 MNT-24007 Use issuer URI from the IdP (#2250) 2023-10-13 14:48:35 +02:00
alfresco-build
53c99a0ba4 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 12:14:08 +00:00
alfresco-build
07cd283a1e [maven-release-plugin][skip ci] prepare release 23.1.0.244 2023-10-13 12:14:06 +00:00
Domenico Sibilio
f7a4da0ba5 Free up GHA runner disk space [db] (#2249) 2023-10-13 13:28:07 +02:00
alfresco-build
c344f7ab1a [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-13 10:19:34 +00:00
alfresco-build
afe100097e [maven-release-plugin][skip ci] prepare release 23.1.0.243 2023-10-13 10:19:31 +00:00
Manish Kumar
2cc0137be3 [MNT-23896] run handleClientAuth only when this.enforce variable is true (#2248) 2023-10-13 14:25:49 +05:30
alfresco-build
3e91bf6739 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-12 13:23:31 +00:00
alfresco-build
4b77b77013 [maven-release-plugin][skip ci] prepare release 23.1.0.242 2023-10-12 13:23:28 +00:00
Kacper Magdziarz
7a84e4d5f1 [ACS-6116] Leftovers removal (#2246) 2023-10-12 14:37:56 +02:00
alfresco-build
eedb601320 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-12 11:23:11 +00:00
alfresco-build
536ac35aab [maven-release-plugin][skip ci] prepare release 23.1.0.241 2023-10-12 11:23:07 +00:00
Krystian Dabrowski
a61d5a407e ACS-6070: Support for highlighting prefix and postfix (#2245)
* ACS-6070: Support for highlighting prefix and postfix
- added prefix and postfix to field model
2023-10-12 12:01:28 +02:00
alfresco-build
e8c9c9aef5 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-10 11:23:18 +00:00
alfresco-build
6b832aecd1 [maven-release-plugin][skip ci] prepare release 23.1.0.240 2023-10-10 11:23:16 +00:00
Marcin Strankowski
eebacd0a5f MNT-23891: Change configuration for UpgradePasswordHashJob to one recommended, it has been tested locally and indeed runs proper code. A wrong class was called for the good parameters given, probably a copy/paste typo. (#2234) 2023-10-10 12:35:55 +02:00
alfresco-build
6eff1e1219 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-09 11:48:26 +00:00
alfresco-build
e52aaa6b8d [maven-release-plugin][skip ci] prepare release 23.1.0.239 2023-10-09 11:48:23 +00:00
Piotr Żurek
fb78a5fe41 ACS-6103 Upgrade PostgreSQL version (#2233) 2023-10-09 13:01:35 +02:00
alfresco-build
7b4c420f3e [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-09 09:52:01 +00:00
alfresco-build
507a617c51 [maven-release-plugin][skip ci] prepare release 23.1.0.238 2023-10-09 09:51:57 +00:00
Manish Kumar
637cdd4f3b [ACS-4778] Added usr:user nodeType for exclusion in property file (#2232) 2023-10-09 14:39:18 +05:30
alfresco-build
8959db9017 [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-08 00:07:00 +00:00
alfresco-build
9032e1cd69 [maven-release-plugin][skip ci] prepare release 23.1.0.237 2023-10-08 00:06:57 +00:00
Alfresco CI User
32f33c04b2 [force] Force release for 2023-10-08. 2023-10-08 00:03:18 +00:00
alfresco-build
ce3b4f5f0c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-06 14:21:47 +00:00
alfresco-build
9fd4efcec7 [maven-release-plugin][skip ci] prepare release 23.1.0.236 2023-10-06 14:21:45 +00:00
Krystian Dabrowski
395d7ded57 ACS-5471: Secondary path support (#2213)
* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support

* ACS-5471: Secondary path support
- fixed test method name

* ACS-5471: Secondary path support
- trying to fix failing on CI DispositionScheduleLinkedRecordsTest

* ACS-5471: Secondary path support
- trying to fix failing on CI DispositionScheduleLinkedRecordsTest

* ACS-5471: Secondary path support
- trying to fix failing CI due to DispositionScheduleLinkedRecordsTest

* ACS-5471: Test adjustment to follow same behavior as introduced by ACS-5325

* ACS-5471: Fixing docker issues

---------

Co-authored-by: mpichura <maciej.pichura@hyland.com>
2023-10-06 15:36:46 +02:00
alfresco-build
c157780dcb [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-04 09:43:37 +00:00
alfresco-build
63be57cafe [maven-release-plugin][skip ci] prepare release 23.1.0.235 2023-10-04 09:43:33 +00:00
Wojtek Świętoń
64dad4fc89 ACS-5830 Bump alfresco-transform-service to latest 4.0.0-A3 2023-10-04 10:56:31 +02:00
alfresco-build
2f7db5f0ee [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-03 12:40:44 +00:00
alfresco-build
61ff6dafe8 [maven-release-plugin][skip ci] prepare release 23.1.0.234 2023-10-03 12:40:40 +00:00
Damian Ujma
456adc2aa2 ACS-5325 Invoke required policy (#2215)
---------

Co-authored-by: Domenico Sibilio <domenicosibilio@gmail.com>
2023-10-03 13:28:15 +02:00
HylandAditya
daf573e24a Merge pull request #2221 from Alfresco/dependabot/maven/commons-io-commons-io-2.14.0
Bump commons-io:commons-io from 2.13.0 to 2.14.0
2023-10-03 14:17:25 +05:30
alfresco-build
d46fbdcf4c [maven-release-plugin][skip ci] prepare for next development iteration 2023-10-01 00:07:27 +00:00
alfresco-build
70f3982b56 [maven-release-plugin][skip ci] prepare release 23.1.0.233 2023-10-01 00:07:24 +00:00
Alfresco CI User
196817cd77 [force] Force release for 2023-10-01. 2023-10-01 00:03:21 +00:00
dependabot[bot]
58b0075a68 Bump commons-io:commons-io from 2.13.0 to 2.14.0
Bumps commons-io:commons-io from 2.13.0 to 2.14.0.

---
updated-dependencies:
- dependency-name: commons-io:commons-io
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-29 22:02:29 +00:00
alfresco-build
d616226918 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-29 14:21:51 +00:00
alfresco-build
c3dadf6bbf [maven-release-plugin][skip ci] prepare release 23.1.0.232 2023-09-29 14:21:48 +00:00
Krystian Dabrowski
a973e17a86 ACS-6073: DispositionScheduleLinkedRecordsTest - tests fail over and over again (#2220)
* ACS-6073: DispositionScheduleLinkedRecordsTest - tests suddenly started to fail over and over again
- disabling failing tests
2023-09-29 15:33:19 +02:00
Wojtek Świętoń
86d22ccd8e ACS-5830 Bump alfresco-transform-core.version to 5.0.0-A3 2023-09-29 12:59:39 +02:00
Tom Page
e0a1defb80 Update ya-pmd-scan to 2.0.5. 2023-09-28 10:32:34 +01:00
alfresco-build
eebd110c34 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-27 13:08:20 +00:00
alfresco-build
9d60e36682 [maven-release-plugin][skip ci] prepare release 23.1.0.231 2023-09-27 13:08:17 +00:00
Tom Page
248fecd030 ACS-4953 Add builder to TAS highlight POJO. (#2214)
* ACS-4953 Add builder to TAS highlight POJO.

Also rename ResponseHighLightModel to ResponseHighlightModel.
2023-09-27 12:13:11 +01:00
alfresco-build
d3498f4bc4 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-27 04:48:50 +00:00
alfresco-build
a862b8f829 [maven-release-plugin][skip ci] prepare release 23.1.0.230 2023-09-27 04:48:48 +00:00
Piotr Żurek
73a3f9bb19 ACS-6015 Add -parameters compiler flag (#2212) 2023-09-27 06:03:42 +02:00
alfresco-build
71063661ea [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-26 11:10:23 +00:00
alfresco-build
449c58adad [maven-release-plugin][skip ci] prepare release 23.1.0.229 2023-09-26 11:10:21 +00:00
dependabot[bot]
94066cc682 Bump org.apache.httpcomponents.core5:httpcore5 from 5.2.2 to 5.2.3 (#2201)
Bumps [org.apache.httpcomponents.core5:httpcore5](https://github.com/apache/httpcomponents-core) from 5.2.2 to 5.2.3.
- [Changelog](https://github.com/apache/httpcomponents-core/blob/master/RELEASE_NOTES.txt)
- [Commits](https://github.com/apache/httpcomponents-core/compare/rel/v5.2.2...rel/v5.2.3)

---
updated-dependencies:
- dependency-name: org.apache.httpcomponents.core5:httpcore5
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-26 12:02:14 +02:00
alfresco-build
52991462a7 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-26 08:50:17 +00:00
alfresco-build
e530ffb9b6 [maven-release-plugin][skip ci] prepare release 23.1.0.228 2023-09-26 08:50:14 +00:00
Kacper Magdziarz
5e0c592fe9 [ACS-6011] Upgrade to Jakarta compatible ATS (#2210) 2023-09-26 09:25:16 +02:00
dependabot[bot]
fb3552945e Bump org.slf4j:slf4j-api from 2.0.7 to 2.0.9 (#2175)
Bumps org.slf4j:slf4j-api from 2.0.7 to 2.0.9.

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-26 02:53:31 +05:30
alfresco-build
8a43c5741d [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-25 16:16:05 +00:00
alfresco-build
6af36ae79e [maven-release-plugin][skip ci] prepare release 23.1.0.227 2023-09-25 16:16:02 +00:00
Piotr Żurek
d1585e4578 ACS-6057 Gytheio removal (#2206) 2023-09-25 17:31:36 +02:00
alfresco-build
dee199da5e [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-25 15:06:39 +00:00
alfresco-build
cd0db19ef8 [maven-release-plugin][skip ci] prepare release 23.1.0.226 2023-09-25 15:06:35 +00:00
dependabot[bot]
ea835f9185 Bump org.alfresco:alfresco-transform-model from 4.1.0-M1 to 5.0.0-A2 (#2205)
Bumps [org.alfresco:alfresco-transform-model](https://github.com/Alfresco/alfresco-transform-core) from 4.1.0-M1 to 5.0.0-A2.
- [Release notes](https://github.com/Alfresco/alfresco-transform-core/releases)
- [Changelog](https://github.com/Alfresco/alfresco-transform-core/blob/master/docs/build-and-release.md)
- [Commits](https://github.com/Alfresco/alfresco-transform-core/compare/4.1.0-M1...5.0.0-A2)

---
updated-dependencies:
- dependency-name: org.alfresco:alfresco-transform-model
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-25 15:54:46 +05:30
alfresco-build
b3f49ebe54 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-24 00:06:41 +00:00
alfresco-build
a956469a0b [maven-release-plugin][skip ci] prepare release 23.1.0.225 2023-09-24 00:06:38 +00:00
Alfresco CI User
a6f57ef9e8 [force] Force release for 2023-09-24. 2023-09-24 00:03:22 +00:00
Paweł Rainer
67b5c9bc70 Bump pmd scan job version (#2204) 2023-09-22 11:55:47 +01:00
alfresco-build
cd72aba32b [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-22 10:32:18 +00:00
alfresco-build
7f43175288 [maven-release-plugin][skip ci] prepare release 23.1.0.224 2023-09-22 10:32:15 +00:00
dependabot[bot]
60437b1cc3 Bump org.projectlombok:lombok from 1.18.28 to 1.18.30 (#2200)
Bumps [org.projectlombok:lombok](https://github.com/projectlombok/lombok) from 1.18.28 to 1.18.30.
- [Release notes](https://github.com/projectlombok/lombok/releases)
- [Changelog](https://github.com/projectlombok/lombok/blob/master/doc/changelog.markdown)
- [Commits](https://github.com/projectlombok/lombok/compare/v1.18.28...v1.18.30)

---
updated-dependencies:
- dependency-name: org.projectlombok:lombok
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-21 11:19:33 +05:30
alfresco-build
5eae584ea5 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-19 21:08:53 +00:00
alfresco-build
234222ba4b [maven-release-plugin][skip ci] prepare release 23.1.0.223 2023-09-19 21:08:49 +00:00
dependabot[bot]
d6c4934910 Bump dependency.rest-assured.version from 5.3.1 to 5.3.2 (#2189)
Bumps `dependency.rest-assured.version` from 5.3.1 to 5.3.2.

Updates `io.rest-assured:rest-assured` from 5.3.1 to 5.3.2
- [Changelog](https://github.com/rest-assured/rest-assured/blob/master/changelog.txt)
- [Commits](https://github.com/rest-assured/rest-assured/commits)

Updates `io.rest-assured:json-path` from 5.3.1 to 5.3.2
- [Changelog](https://github.com/rest-assured/rest-assured/blob/master/changelog.txt)
- [Commits](https://github.com/rest-assured/rest-assured/commits)

Updates `io.rest-assured:xml-path` from 5.3.1 to 5.3.2
- [Changelog](https://github.com/rest-assured/rest-assured/blob/master/changelog.txt)
- [Commits](https://github.com/rest-assured/rest-assured/commits)

Updates `io.rest-assured:json-schema-validator` from 5.3.1 to 5.3.2

---
updated-dependencies:
- dependency-name: io.rest-assured:rest-assured
  dependency-type: direct:development
  update-type: version-update:semver-patch
- dependency-name: io.rest-assured:json-path
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: io.rest-assured:xml-path
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: io.rest-assured:json-schema-validator
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-19 20:29:43 +05:30
atkumar14
5fc889a101 Merge pull request #2162 from Alfresco/dependabot/maven/org.yaml-snakeyaml-2.2
Bump org.yaml:snakeyaml from 2.1 to 2.2
2023-09-19 16:57:39 +05:30
alfresco-build
ce8f1bda77 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-19 09:29:55 +00:00
alfresco-build
37606c1aa5 [maven-release-plugin][skip ci] prepare release 23.1.0.222 2023-09-19 09:29:52 +00:00
dependabot[bot]
48d7f4223e Bump org.apache.commons:commons-compress from 1.23.0 to 1.24.0 (#2191)
Bumps org.apache.commons:commons-compress from 1.23.0 to 1.24.0.

---
updated-dependencies:
- dependency-name: org.apache.commons:commons-compress
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-19 14:11:49 +05:30
Tom Page
e070354764 PRODENG-222 Fail builds based on PMD scan. (#2196) 2023-09-18 18:14:17 +01:00
alfresco-build
0c4ce183be [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-17 00:06:31 +00:00
alfresco-build
c6881ac128 [maven-release-plugin][skip ci] prepare release 23.1.0.221 2023-09-17 00:06:29 +00:00
Alfresco CI User
4c4c561f94 [force] Force release for 2023-09-17. 2023-09-17 00:03:17 +00:00
alfresco-build
7c993ba695 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-15 14:31:23 +00:00
alfresco-build
8f929d9744 [maven-release-plugin][skip ci] prepare release 23.1.0.220 2023-09-15 14:31:20 +00:00
Domenico Sibilio
c28be29c0e ACS-5944 Bump ATS to the latest -Mx (#2194) 2023-09-15 15:45:46 +02:00
alfresco-build
285080566a [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-15 12:22:24 +00:00
alfresco-build
99f8ba31c4 [maven-release-plugin][skip ci] prepare release 23.1.0.219 2023-09-15 12:22:21 +00:00
MohinishSah
ce9bf0cd71 Updating Google docs and AOS Milestone version 2023-09-15 17:05:03 +05:30
alfresco-build
e436be7f10 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-11 08:39:40 +00:00
alfresco-build
06a4e28b40 [maven-release-plugin][skip ci] prepare release 23.1.0.218 2023-09-11 08:39:36 +00:00
MohinishSah
5b9311a18a Updating latest AOS and Google drive version 2023-09-11 13:00:02 +05:30
alfresco-build
ccd2f5e996 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-10 00:06:26 +00:00
alfresco-build
94ac09bf44 [maven-release-plugin][skip ci] prepare release 23.1.0.217 2023-09-10 00:06:24 +00:00
Alfresco CI User
9331c33e87 [force] Force release for 2023-09-10. 2023-09-10 00:03:13 +00:00
alfresco-build
ba250f6872 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-08 04:27:40 +00:00
alfresco-build
5343b615e4 [maven-release-plugin][skip ci] prepare release 23.1.0.216 2023-09-08 04:27:37 +00:00
dependabot[bot]
80ffd2826c Bump org.messaginghub:pooled-jms from 3.1.1 to 3.1.2 (#2183)
Bumps [org.messaginghub:pooled-jms](https://github.com/messaginghub/pooled-jms) from 3.1.1 to 3.1.2.
- [Commits](https://github.com/messaginghub/pooled-jms/compare/3.1.1...3.1.2)

---
updated-dependencies:
- dependency-name: org.messaginghub:pooled-jms
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-07 23:33:59 +02:00
alfresco-build
41ce8a869e [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-06 17:03:00 +00:00
alfresco-build
9ece324e92 [maven-release-plugin][skip ci] prepare release 23.1.0.215 2023-09-06 17:02:57 +00:00
evasques
ddd0fac97e MNT-23953 - PropTablesCleaner v3 Skip IDs (#2181)
* Configure v3 to skip ids per table. Added unit test
2023-09-06 17:13:36 +01:00
Domenico Sibilio
c43dc66e2e ACS-5943 Bump ATS to 4.1.0-A1 / 3.1.0-A1 (#2182) 2023-09-06 16:56:58 +02:00
alfresco-build
a09c7ef4ca [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-06 11:57:10 +00:00
alfresco-build
e89f0b4e82 [maven-release-plugin][skip ci] prepare release 23.1.0.214 2023-09-06 11:57:07 +00:00
Kacper Magdziarz
e7f9b439ae [ACS-5936] Remove taglibs-standard jars (#2180) 2023-09-06 13:10:24 +02:00
alfresco-build
4a0f5eab77 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-06 07:22:38 +00:00
alfresco-build
779c16677b [maven-release-plugin][skip ci] prepare release 23.1.0.213 2023-09-06 07:22:36 +00:00
kmagdziarz
ddf14c7e31 [ACS-5935] Revert change. 2023-09-06 08:39:25 +02:00
Kacper Magdziarz
71df1b01e2 [ACS-5935] Remove rpc-api (#2178) 2023-09-06 08:36:28 +02:00
alfresco-build
fc83e0d131 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-06 06:22:33 +00:00
alfresco-build
bce387bbb7 [maven-release-plugin][skip ci] prepare release 23.1.0.212 2023-09-06 06:22:31 +00:00
dependabot[bot]
9fb46ed66c Bump io.fabric8:docker-maven-plugin from 0.43.0 to 0.43.4 (#2148)
Bumps [io.fabric8:docker-maven-plugin](https://github.com/fabric8io/docker-maven-plugin) from 0.43.0 to 0.43.4.
- [Release notes](https://github.com/fabric8io/docker-maven-plugin/releases)
- [Changelog](https://github.com/fabric8io/docker-maven-plugin/blob/master/doc/changelog.md)
- [Commits](https://github.com/fabric8io/docker-maven-plugin/compare/v0.43.0...v0.43.4)

---
updated-dependencies:
- dependency-name: io.fabric8:docker-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-06 07:35:48 +02:00
alfresco-build
203f2933bc [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-05 18:40:10 +00:00
alfresco-build
5849da3cc1 [maven-release-plugin][skip ci] prepare release 23.1.0.211 2023-09-05 18:40:07 +00:00
dependabot[bot]
c3bcc5f242 Bump org.alfresco.tas:ftp from 1.18 to 1.19 (#2171)
Bumps [org.alfresco.tas:ftp](https://github.com/Alfresco/alfresco-tas-ftp) from 1.18 to 1.19.
- [Changelog](https://github.com/Alfresco/alfresco-tas-ftp/blob/master/docs/CHANGELOG.md)
- [Commits](https://github.com/Alfresco/alfresco-tas-ftp/compare/v1.18...v1.19)

---
updated-dependencies:
- dependency-name: org.alfresco.tas:ftp
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-05 19:58:48 +02:00
alfresco-build
7078ba7d56 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-05 13:02:43 +00:00
alfresco-build
65dd5be19b [maven-release-plugin][skip ci] prepare release 23.1.0.210 2023-09-05 13:02:40 +00:00
dependabot[bot]
08e67aa004 Bump org.aspectj:aspectjrt from 1.9.19 to 1.9.20.1 (#2176)
* Bump org.aspectj:aspectjrt from 1.9.19 to 1.9.20.1

Bumps [org.aspectj:aspectjrt](https://github.com/eclipse/org.aspectj) from 1.9.19 to 1.9.20.1.
- [Release notes](https://github.com/eclipse/org.aspectj/releases)
- [Commits](https://github.com/eclipse/org.aspectj/commits)

---
updated-dependencies:
- dependency-name: org.aspectj:aspectjrt
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* Get rid of the aspectj warnings

* Switch to the maintained aspectj-maven-plugin

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: pzurek <Piotr.Zurek@hyland.com>
2023-09-05 14:01:06 +02:00
dependabot[bot]
63db311d11 Bump dependency.webscripts.version from 8.46 to 9.0 (#2173)
Bumps `dependency.webscripts.version` from 8.46 to 9.0.

Updates `org.alfresco.surf:spring-surf-core-configservice` from 8.46 to 9.0

Updates `org.alfresco.surf:spring-webscripts` from 8.46 to 9.0
- [Commits](https://github.com/Alfresco/surf-webscripts/compare/spring-surf-webscripts-parent-8.46...spring-surf-webscripts-parent-9.0)

Updates `org.alfresco.surf:spring-webscripts:tests` from 8.46 to 9.0
- [Commits](https://github.com/Alfresco/surf-webscripts/compare/spring-surf-webscripts-parent-8.46...spring-surf-webscripts-parent-9.0)

Updates `org.alfresco.surf:spring-webscripts-api` from 8.46 to 9.0

---
updated-dependencies:
- dependency-name: org.alfresco.surf:spring-surf-core-configservice
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.alfresco.surf:spring-webscripts
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.alfresco.surf:spring-webscripts:tests
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.alfresco.surf:spring-webscripts-api
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-05 08:44:17 +02:00
dependabot[bot]
ed674cfa2e Bump org.alfresco.tas:webdav from 1.20 to 1.21 (#2172)
Bumps [org.alfresco.tas:webdav](https://github.com/Alfresco/alfresco-tas-webdav) from 1.20 to 1.21.
- [Changelog](https://github.com/Alfresco/alfresco-tas-webdav/blob/master/docs/CHANGELOG.md)
- [Commits](https://github.com/Alfresco/alfresco-tas-webdav/compare/v1.20...v1.21)

---
updated-dependencies:
- dependency-name: org.alfresco.tas:webdav
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-05 06:08:51 +02:00
alfresco-build
21d7cf8c69 [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-03 00:07:11 +00:00
alfresco-build
077ea7f9a5 [maven-release-plugin][skip ci] prepare release 23.1.0.209 2023-09-03 00:07:09 +00:00
Alfresco CI User
6384a33ac9 [force] Force release for 2023-09-03. 2023-09-03 00:03:13 +00:00
Piyush Joshi
0800c0e42a Merge pull request #2164 from Alfresco/MNT-23107_NullPointer_ScriptLocation
[MNT-23107] The scriptLocation property is not picked up when set in a bean definition
2023-09-01 15:46:49 +05:30
alfresco-build
94b0bae03c [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-01 08:45:02 +00:00
alfresco-build
82eef1b975 [maven-release-plugin][skip ci] prepare release 23.1.0.208 2023-09-01 08:44:58 +00:00
Piotr Żurek
67fa274abb ACS-5933 Switch to SS 2.0.8.1 (#2170) 2023-09-01 09:59:08 +02:00
alfresco-build
acc50febda [maven-release-plugin][skip ci] prepare for next development iteration 2023-09-01 07:19:12 +00:00
alfresco-build
7b4ba83797 [maven-release-plugin][skip ci] prepare release 23.1.0.207 2023-09-01 07:19:10 +00:00
dependabot[bot]
874835301e Bump com.google.code.gson:gson from 2.8.9 to 2.10.1 (#2169)
Bumps [com.google.code.gson:gson](https://github.com/google/gson) from 2.8.9 to 2.10.1.
- [Release notes](https://github.com/google/gson/releases)
- [Changelog](https://github.com/google/gson/blob/main/CHANGELOG.md)
- [Commits](https://github.com/google/gson/compare/gson-parent-2.8.9...gson-parent-2.10.1)

---
updated-dependencies:
- dependency-name: com.google.code.gson:gson
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-01 11:50:52 +05:30
alfresco-build
9413bad6ef [maven-release-plugin][skip ci] prepare for next development iteration 2023-08-31 19:25:53 +00:00
pjoshi31
03fe795fba Added null check 2023-08-30 06:13:41 +05:30
pjoshi31
557292982a Added null check 2023-08-29 18:46:00 +05:30
dependabot[bot]
d0fabbdf1f Bump org.yaml:snakeyaml from 2.1 to 2.2
Bumps [org.yaml:snakeyaml](https://bitbucket.org/snakeyaml/snakeyaml) from 2.1 to 2.2.
- [Commits](https://bitbucket.org/snakeyaml/snakeyaml/branches/compare/snakeyaml-2.2..snakeyaml-2.1)

---
updated-dependencies:
- dependency-name: org.yaml:snakeyaml
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-08-28 22:31:04 +00:00
74 changed files with 1647 additions and 632 deletions

View File

@@ -36,8 +36,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare maven cache and check compilation"
@@ -55,11 +58,12 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v1.35.2
continue-on-error: true
with:
srcclr-api-token: ${{ secrets.SRCCLR_API_TOKEN }}
@@ -76,12 +80,10 @@ jobs:
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: Alfresco/ya-pmd-scan@v2.0.0
with:
fail-on-new-issues: "false"
- uses: Alfresco/ya-pmd-scan@v2.0.5
all_unit_tests_suite:
name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
core_datamodel_tests:
name: "Core, Data-Model - Build and test"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -90,14 +92,37 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run tests"
run: |
mvn -B test -pl core,data-model -am -DfailIfNoTests=false
mvn -B test -pl "repository,mmt" -am "-Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite" -DfailIfNoTests=false
run: mvn -B test -pl core,data-model -am -DfailIfNoTests=false
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
all_unit_tests_suite:
name: "Repository - AllUnitTestsSuite - Build and test"
runs-on: ubuntu-latest
needs: [prepare]
if: >
!contains(github.event.head_commit.message, '[skip repo]') &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run tests"
run: mvn -B test -pl repository,mmt -am -Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite -DfailIfNoTests=false
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
@@ -127,8 +152,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -157,11 +185,14 @@ jobs:
strategy:
fail-fast: false
matrix:
version: ['10.2.18', '10.4', '10.5']
version: ["10.2.18", "10.4", "10.5"]
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: Run MariaDB ${{ matrix.version }} database
@@ -186,8 +217,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.6 database"
@@ -212,8 +246,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MySQL 8 database"
@@ -225,8 +262,8 @@ jobs:
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_13_7_tests:
name: "Repository - PostgreSQL 13.7 tests"
repository_postgresql_13_12_tests:
name: "Repository - PostgreSQL 13.12 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -237,21 +274,52 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 13.7 database"
- name: "Run PostgreSQL 13.12 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 13.7
POSTGRES_VERSION: 13.12
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_14_4_tests:
name: "Repository - PostgreSQL 14.4 tests"
repository_postgresql_14_9_tests:
name: "Repository - PostgreSQL 14.9 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
(((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) && github.event_name != 'pull_request' &&
!contains(github.event.head_commit.message, '[skip db]')) ||
contains(github.event.head_commit.message, '[db]')) &&
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.9 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 14.9
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
run: bash ./scripts/ci/cleanup_cache.sh
repository_postgresql_15_4_tests:
name: "Repository - PostgreSQL 15.4 tests"
runs-on: ubuntu-latest
needs: [prepare]
if: >
@@ -262,14 +330,17 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.4 database"
- name: "Run PostgreSQL 15.4 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
env:
POSTGRES_VERSION: 14.4
POSTGRES_VERSION: 15.4
- name: "Run tests"
run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Clean Maven cache"
@@ -285,8 +356,11 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run ActiveMQ"
@@ -327,16 +401,19 @@ jobs:
compose-profile: with-transform-core-aio
- testSuite: SearchTestSuite
compose-profile: default
mvn-options: '-Dindex.subsystem.name=solr6'
mvn-options: "-Dindex.subsystem.name=solr6"
- testSuite: MTLSTestSuite
compose-profile: with-mtls-transform-core-aio
mtls: true
disabledHostnameVerification: false
mvn-options: '-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore'
mvn-options: "-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore"
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Set transformers tag"
@@ -405,8 +482,11 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -442,11 +522,14 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 14.4 database"
- name: "Run Postgres 15.4 database"
run: docker-compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile postgres up -d
- name: "Run tests"
run: mvn -B test -pl :alfresco-share-services -am -Dtest=ShareServicesTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
@@ -471,8 +554,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -502,8 +588,11 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -529,8 +618,11 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -565,7 +657,7 @@ jobs:
ags_start_api_explorer:
name: "Test Tomcat deployment of api explorer"
runs-on: ubuntu-latest
needs: [ prepare ]
needs: [prepare]
if: >
(((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request' ) &&
!contains(github.event.head_commit.message, '[skip ags]')) ||
@@ -574,8 +666,11 @@ jobs:
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: actions/checkout@v3
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@hack-build-cache
with:
build-cache: "true"
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |

View File

@@ -34,11 +34,12 @@ jobs:
- uses: actions/checkout@v3
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.35.2
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.35.2
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -62,11 +63,12 @@ jobs:
- uses: actions/checkout@v3
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v1.35.2
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.35.2
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.35.2
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<build>

View File

@@ -135,7 +135,7 @@ public class DispositionScheduleLinkedRecordsTest extends BaseRMRestTest {
* <p>
* <p/> TestRail Test C775<p/>
**/
@Test
@Test(enabled = false) // temporary disabled, see ACS-6073
@AlfrescoTest(jira = "RM-1622")
public void dispositionScheduleLinkedRecords() throws UnsupportedEncodingException {
STEP("Create record category");
@@ -202,7 +202,7 @@ public class DispositionScheduleLinkedRecordsTest extends BaseRMRestTest {
* Check the disposition steps for a record can be executed
* When the record is linked to a folder with the same disposition schedule
* */
@Test
@Test(enabled = false) // temporary disabled, see ACS-6073
@AlfrescoTest (jira = "RM-3060")
public void sameDispositionScheduleLinkedRecords() throws UnsupportedEncodingException {

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.7-A5
POSTGRES_TAG=14.4
SOLR6_TAG=2.0.8.1
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.2-jre17-rockylinux8

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>
@@ -416,9 +416,7 @@
<configuration>
<images>
<image>
<!-- TODO upgrade this old postgres version -->
<name>postgres:9.4.12</name>
<!--<name>postgres:13.3</name>-->
<name>postgres:15.4</name>
<run>
<ports>
<port>${postgresql.tests.port}:${postgresql.port}</port>

View File

@@ -61,6 +61,7 @@ public class RFC822MetadataExtracter extends org.alfresco.repo.content.metadata.
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
super.setNodeService(nodeService);
}
/**
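
For context on the MNT-23933 hunk above: the extracter keeps its own nodeService reference, and before this change only that copy was populated, so base-class code that dereferences its own field could fail with a NullPointerException. A minimal, self-contained sketch of the pattern; apart from setNodeService/nodeService, which appear in the hunk, all names here are assumptions:

// Sketch only - illustrates why delegating to super.setNodeService fixes the NPE.
interface NodeService {}                              // stand-in for Alfresco's NodeService interface

class BaseMetadataExtracter                           // assumed parent; the real one lives in org.alfresco.repo.content.metadata
{
    protected NodeService nodeService;                // base-class logic reads this copy

    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }
}

class RFC822MetadataExtracter extends BaseMetadataExtracter
{
    private NodeService nodeService;                  // the subclass keeps its own copy

    @Override
    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
        super.setNodeService(nodeService);            // MNT-23933: also populate the base copy so
                                                      // base-class code no longer sees a null reference
    }
}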

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>
@@ -131,23 +131,6 @@
<groupId>jakarta.servlet</groupId>
<artifactId>jakarta.servlet-api</artifactId>
</dependency>
<!-- SHA-2432 -->
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-spec</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-impl</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-jstlel</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -74,8 +74,6 @@ public abstract class X509ServletFilterBase implements Filter
logger.debug("Initializing X509ServletFilter");
}
this.handleClientAuth();
this.enforce = checkEnforce(config.getServletContext());
if(logger.isDebugEnabled())
@@ -85,6 +83,8 @@ public abstract class X509ServletFilterBase implements Filter
if (this.enforce)
{
this.handleClientAuth();
/*
* We are enforcing so get the cert-contains string.
*/
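
For context on the MNT-23896 hunks above: handleClientAuth() used to run unconditionally during filter initialisation and is now only invoked when certificate enforcement is enabled. A minimal sketch of the resulting flow, assuming the surrounding method is the standard Filter.init(FilterConfig) and using only the names visible in the hunks; everything else is simplified:

// Sketch of the post-change initialisation order; the real method contains further setup.
@Override
public void init(FilterConfig config) throws ServletException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("Initializing X509ServletFilter");
    }

    // Decide first whether client-certificate enforcement is switched on.
    this.enforce = checkEnforce(config.getServletContext());

    if (this.enforce)
    {
        // Moved here by MNT-23896: client authentication is only configured
        // when enforcement is actually enabled.
        this.handleClientAuth();

        // We are enforcing so get the cert-contains string.
        // ... remainder of the enforced-mode setup ...
    }
}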

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
</project>

View File

@@ -208,10 +208,6 @@ Hibernate http://www.hibernate.org/
jid3lib http://javamusictag.sourceforge.net/
TinyMCE http://www.tinymce.com/
=== LGPL 3.0 ===
Gytheio https://github.com/Alfresco/gytheio
=== MIT License ===
Bouncy Castle http://www.bouncycastle.org/

View File

@@ -98,4 +98,4 @@ EXPOSE 10001
# For remote debug
EXPOSE 8000
USER ${IMAGEUSERNAME}
USER ${IMAGEUSERNAME}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.7-A5
POSTGRES_TAG=14.4
SOLR6_TAG=2.0.8.1
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.2-jre17-rockylinux8

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<organization>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>


@@ -35,6 +35,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.stream.Stream;
import io.restassured.http.ContentType;
import org.alfresco.rest.core.JsonBodyGenerator;
@@ -51,9 +52,11 @@ import org.alfresco.rest.model.RestCommentModelsCollection;
import org.alfresco.rest.model.RestNodeAssocTargetModel;
import org.alfresco.rest.model.RestNodeAssociationModel;
import org.alfresco.rest.model.RestNodeAssociationModelCollection;
import org.alfresco.rest.model.RestNodeAssociationTypeModel;
import org.alfresco.rest.model.RestNodeBodyModel;
import org.alfresco.rest.model.RestNodeBodyMoveCopyModel;
import org.alfresco.rest.model.RestNodeChildAssocModelCollection;
import org.alfresco.rest.model.RestNodeChildAssociationModel;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.model.RestNodeModelsCollection;
import org.alfresco.rest.model.RestRatingModel;
@@ -72,6 +75,7 @@ import org.alfresco.rest.model.body.RestNodeLockBodyModel;
import org.alfresco.rest.model.builder.NodesBuilder;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.RepoTestModel;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.testng.reporters.Files;
@@ -824,25 +828,118 @@ public class Node extends ModelRequest<Node>
}
/**
* Create secondary children association using POST call 'nodes/{nodeId}/secondary-children
* Use a list of secondary children nodes
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @return a collection of nodes
* @param secondaryChild - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssocModelCollection createSecondaryChildren(String secondaryChildren)
public RestNodeChildAssociationModel addSecondaryChild(RepoTestModel secondaryChild)
{
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, secondaryChildren, "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return addSecondaryChild("cm:contains", secondaryChild);
}
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(String associationType, RepoTestModel secondaryChild)
{
return addSecondaryChild(new RestNodeChildAssociationModel(secondaryChild.getNodeRef(), associationType));
}
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildAssociation - node's secondary parent-child association model
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(RestNodeChildAssociationModel secondaryChildAssociation)
{
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, secondaryChildAssociation.toJson(), "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return restWrapper.processModel(RestNodeChildAssociationModel.class, request);
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildren - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RepoTestModel... secondaryChildren)
{
return addSecondaryChildren("cm:contains", secondaryChildren);
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChildren - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(String associationType, RepoTestModel... secondaryChildren)
{
return addSecondaryChildren(Stream.of(secondaryChildren)
.map(child -> new RestNodeChildAssociationModel(child.getNodeRef(), associationType))
.toArray(RestNodeChildAssociationModel[]::new));
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildrenAssociations - node's secondary parent-child association models
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RestNodeChildAssociationModel... secondaryChildrenAssociations)
{
String requestBody = arrayToJson(Stream.of(secondaryChildrenAssociations).toList());
RestRequest request = RestRequest.requestWithBody(HttpMethod.POST, requestBody, "nodes/{nodeId}/secondary-children?{parameters}", repoModel.getNodeRef(), restWrapper.getParameters());
return restWrapper.processModels(RestNodeChildAssocModelCollection.class, request);
}
/**
* Delete secondary children using DELETE call 'nodes/{nodeId}/secondary-children/{childId}
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @return a collection of nodes
* @param secondaryChild - node, which should NOT be a secondary child anymore
*/
public void deleteSecondaryChild(RestNodeAssociationModel child)
public void removeSecondaryChild(RepoTestModel secondaryChild)
{
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/secondary-children/{childId}?{parameters}", repoModel.getNodeRef(), child.getId(), restWrapper.getParameters());
removeSecondaryChild(null, secondaryChild);
}
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should NOT be a secondary child anymore
*/
public void removeSecondaryChild(String associationType, RepoTestModel secondaryChild)
{
RestNodeAssociationModel associationModel = new RestNodeAssociationModel();
RestNodeAssociationTypeModel associationTypeModel = new RestNodeAssociationTypeModel();
if (associationType != null)
{
associationTypeModel.setAssocType(associationType);
}
associationModel.setAssociation(associationTypeModel);
associationModel.setId(secondaryChild.getNodeRef());
removeSecondaryChild(associationModel);
}
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param secondaryChildAssociation - node's secondary parent-child association to remove
*/
public void removeSecondaryChild(RestNodeAssociationModel secondaryChildAssociation)
{
String parameters = StringUtils.isNotEmpty(secondaryChildAssociation.getAssociation().getAssocType()) ?
"assocType=" + secondaryChildAssociation.getAssociation().getAssocType() + "&" + restWrapper.getParameters() :
restWrapper.getParameters();
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/secondary-children/{childId}?{parameters}", repoModel.getNodeRef(), secondaryChildAssociation.getId(), parameters);
restWrapper.processEmptyModel(request);
}
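A minimal usage sketch of the new secondary-children helpers, mirroring how the updated NodesParentChildrenTests calls them. It is a fragment meant for inside an existing RestTest method; parentFolder (a ContentModel) and childFile (a file model) are hypothetical placeholders supplied by the surrounding TAS test setup.

// parentFolder and childFile are hypothetical models from the surrounding test setup
RestNodeChildAssociationModel assoc = new RestNodeChildAssociationModel(childFile.getNodeRef(), "cm:contains");
RestNodeChildAssocModelCollection created = restClient.withCoreAPI()
        .usingNode(parentFolder)
        .addSecondaryChildren(assoc);
restClient.assertStatusCodeIs(HttpStatus.CREATED);
// the single-argument remove omits assocType from the DELETE call (it resolves to the null-type overload)
restClient.withCoreAPI().usingNode(parentFolder).removeSecondaryChild(childFile);
restClient.assertStatusCodeIs(HttpStatus.NO_CONTENT);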


@@ -44,6 +44,7 @@
package org.alfresco.rest.search;
import java.util.List;
import java.util.Objects;
import org.alfresco.rest.core.IRestModel;
import org.alfresco.utility.model.TestModel;
@@ -52,11 +53,11 @@ import org.alfresco.utility.model.TestModel;
* @author Michael Suzuki
*
*/
public class ResponseHighLightModel extends TestModel implements IRestModel<ResponseHighLightModel>
public class ResponseHighlightModel extends TestModel implements IRestModel<ResponseHighlightModel>
{
private ResponseHighLightModel model;
private ResponseHighlightModel model;
private String field;
private List<Object> snippets;
private List<String> snippets;
public String getField()
{
@@ -66,19 +67,44 @@ public class ResponseHighLightModel extends TestModel implements IRestModel<Resp
{
this.field = field;
}
public List<Object> getSnippets()
public List<String> getSnippets()
{
return snippets;
}
public void setSnippets(List<Object> snippets)
public void setSnippets(List<String> snippets)
{
this.snippets = snippets;
}
@Override
public ResponseHighLightModel onModel()
public ResponseHighlightModel onModel()
{
return model;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
ResponseHighlightModel that = (ResponseHighlightModel) o;
return Objects.equals(model, that.model) && Objects.equals(field, that.field) && Objects.equals(snippets, that.snippets);
}
@Override
public int hashCode()
{
return Objects.hash(model, field, snippets);
}
@Override
public String toString()
{
return "ResponseHighlightModel{model=%s, field=%s, snippets=%s}".formatted(model, field, snippets);
}
}
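With equals and hashCode in place, expected highlight entries can be asserted directly against a search response. A small hedged sketch using AssertJ's assertThat; the field and snippet values are illustrative, and scoreModel stands in for a SearchScoreModel taken from a response (see the SearchScoreModel change further down, which switches its highlight list to this type):

ResponseHighlightModel expected = new ResponseHighlightModel();
expected.setField("cm:name");
expected.setSnippets(List.of("company (homes)"));
// value equality now covers field and snippets, so containment checks work as intended
assertThat(scoreModel.getHighlight()).contains(expected);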


@@ -2,7 +2,7 @@
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,24 +23,6 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* Copyright (C) 2017 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.rest.search;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -55,23 +37,43 @@ import org.alfresco.utility.model.TestModel;
*/
public class RestRequestFieldsModel extends TestModel implements IRestModel<RestRequestFieldsModel>
{
public RestRequestFieldsModel(){}
public RestRequestFieldsModel(String fieldValue)
{
this.field = fieldValue;
}
@JsonProperty(value = "entry")
RestRequestFieldsModel model;
@JsonProperty(required = true)
private String field;
private String prefix;
private String postfix;
private Integer snippetCount;
private Integer fragmentSize;
private Boolean mergeContiguous;
public RestRequestFieldsModel() {
super();
}
public static RestRequestFieldsModel of(String field)
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
return fieldModel;
}
public static RestRequestFieldsModel of(String field, String prefix, String postfix)
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
fieldModel.setPrefix(prefix);
fieldModel.setPostfix(postfix);
return fieldModel;
}
@Override
public RestRequestFieldsModel onModel()
{
return model;
}
@JsonProperty(required = true)
private String field;
public String getField()
{
@@ -82,8 +84,116 @@ public class RestRequestFieldsModel extends TestModel implements IRestModel<Rest
{
this.field = field;
}
public String getPrefix()
{
return prefix;
}
public void setPrefix(String prefix)
{
this.prefix = prefix;
}
public String getPostfix()
{
return postfix;
}
public void setPostfix(String postfix)
{
this.postfix = postfix;
}
public Integer getSnippetCount()
{
return snippetCount;
}
public void setSnippetCount(Integer snippetCount)
{
this.snippetCount = snippetCount;
}
public Integer getFragmentSize()
{
return fragmentSize;
}
public void setFragmentSize(Integer fragmentSize)
{
this.fragmentSize = fragmentSize;
}
public Boolean getMergeContiguous()
{
return mergeContiguous;
}
public void setMergeContiguous(Boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
}
public static Builder builder()
{
return new Builder();
}
public static class Builder
{
private String field;
private String prefix;
private String postfix;
private Integer snippetCount;
private Integer fragmentSize;
private Boolean mergeContiguous;
public Builder field(String field)
{
this.field = field;
return this;
}
public Builder prefix(String prefix)
{
this.prefix = prefix;
return this;
}
public Builder postfix(String postfix)
{
this.postfix = postfix;
return this;
}
public Builder snippetCount(Integer snippetCount)
{
this.snippetCount = snippetCount;
return this;
}
public Builder fragmentSize(Integer fragmentSize)
{
this.fragmentSize = fragmentSize;
return this;
}
public Builder mergeContiguous(Boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
return this;
}
public RestRequestFieldsModel build()
{
RestRequestFieldsModel fieldModel = new RestRequestFieldsModel();
fieldModel.setField(field);
fieldModel.setPrefix(prefix);
fieldModel.setPostfix(postfix);
fieldModel.setSnippetCount(snippetCount);
fieldModel.setFragmentSize(fragmentSize);
fieldModel.setMergeContiguous(mergeContiguous);
return fieldModel;
}
}
}
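A short sketch of the two new construction paths, using illustrative field names: the static of(...) factories cover the common cases, while the builder exposes the per-field highlight options added above.

RestRequestFieldsModel nameField = RestRequestFieldsModel.of("cm:name");
RestRequestFieldsModel titleField = RestRequestFieldsModel.builder()
        .field("cm:title")
        .prefix("<b>").postfix("</b>")
        .snippetCount(2)
        .fragmentSize(150)
        .mergeContiguous(true)
        .build();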


@@ -2,29 +2,30 @@
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.search;
import java.util.Arrays;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -33,62 +34,38 @@ import org.alfresco.rest.core.IRestModel;
import org.alfresco.utility.model.TestModel;
/**
* Generated by 'msuzuki' on '2017-02-23 13:41' from 'Alfresco Search REST API' swagger file
* Generated by 'msuzuki' on '2017-02-23 13:41' from 'Alfresco Search REST API' swagger file
* Generated from 'Alfresco Search REST API' swagger file
* Base Path {@linkplain /alfresco/api/-default-/public/search/versions/1}
*/
public class RestRequestHighlightModel extends TestModel implements IRestModel<RestRequestHighlightModel>
{
@JsonProperty(value = "entry")
@JsonProperty("entry")
RestRequestHighlightModel model;
/** The string used to mark the start of a highlight in a fragment. */
private String prefix;
/** The string used to mark the end of a highlight in a fragment. */
private String postfix;
/** The maximum number of distinct highlight snippets to return for each highlight field. */
private int snippetCount;
/** The character length of each snippet. */
private int fragmentSize;
/** The number of characters to be considered for highlighting. Matches after this count will not be shown. */
private int maxAnalyzedChars;
/** If fragments overlap they can be merged into one larger fragment */
private boolean mergeContiguous;
/** Should phrases be identified. */
private boolean usePhraseHighlighter;
/** The fields to highlight and field specific configuration properties for each field */
private List<RestRequestFieldsModel> fields;
@Override
public RestRequestHighlightModel onModel()
{
return model;
}
/**
The string used to mark the start of a highlight in a fragment.
*/
private String prefix;
/**
The string used to mark the end of a highlight in a fragment.
*/
private String postfix;
/**
The maximum number of distinct highlight snippets to return for each highlight field.
*/
private int snippetCount;
/**
The character length of each snippet.
*/
private int fragmentSize;
/**
The number of characters to be considered for highlighting. Matches after this count will not be shown.
*/
private int maxAnalyzedChars;
/**
If fragments over lap they can be merged into one larger fragment
*/
private boolean mergeContiguous;
/**
Should phrases be identified.
*/
private boolean usePhraseHighlighter;
/**
The fields to highlight and field specific configuration properties for each field
*/
private List<RestRequestFieldsModel> fields;
public String getPrefix()
{
return this.prefix;
@@ -97,7 +74,7 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setPrefix(String prefix)
{
this.prefix = prefix;
}
}
public String getPostfix()
{
@@ -107,7 +84,7 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setPostfix(String postfix)
{
this.postfix = postfix;
}
}
public int getSnippetCount()
{
@@ -117,7 +94,7 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setSnippetCount(int snippetCount)
{
this.snippetCount = snippetCount;
}
}
public int getFragmentSize()
{
@@ -127,7 +104,7 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setFragmentSize(int fragmentSize)
{
this.fragmentSize = fragmentSize;
}
}
public int getMaxAnalyzedChars()
{
@@ -137,9 +114,9 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setMaxAnalyzedChars(int maxAnalyzedChars)
{
this.maxAnalyzedChars = maxAnalyzedChars;
}
}
public boolean getMergeContiguous()
public boolean isMergeContiguous()
{
return this.mergeContiguous;
}
@@ -147,9 +124,9 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setMergeContiguous(boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
}
}
public boolean getUsePhraseHighlighter()
public boolean isUsePhraseHighlighter()
{
return this.usePhraseHighlighter;
}
@@ -157,7 +134,7 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setUsePhraseHighlighter(boolean usePhraseHighlighter)
{
this.usePhraseHighlighter = usePhraseHighlighter;
}
}
public List<RestRequestFieldsModel> getFields()
{
@@ -167,6 +144,91 @@ public class RestRequestHighlightModel extends TestModel implements IRestModel<R
public void setFields(List<RestRequestFieldsModel> fields)
{
this.fields = fields;
}
}
public static RestRequestHighlightModelBuilder builder()
{
return new RestRequestHighlightModelBuilder();
}
public static class RestRequestHighlightModelBuilder
{
private String prefix;
private String postfix;
private int snippetCount;
private int fragmentSize;
private int maxAnalyzedChars;
private boolean mergeContiguous;
private boolean usePhraseHighlighter;
private List<RestRequestFieldsModel> fields;
public RestRequestHighlightModelBuilder prefix(String prefix)
{
this.prefix = prefix;
return this;
}
public RestRequestHighlightModelBuilder postfix(String postfix)
{
this.postfix = postfix;
return this;
}
public RestRequestHighlightModelBuilder snippetCount(int snippetCount)
{
this.snippetCount = snippetCount;
return this;
}
public RestRequestHighlightModelBuilder fragmentSize(int fragmentSize)
{
this.fragmentSize = fragmentSize;
return this;
}
public RestRequestHighlightModelBuilder maxAnalyzedChars(int maxAnalyzedChars)
{
this.maxAnalyzedChars = maxAnalyzedChars;
return this;
}
public RestRequestHighlightModelBuilder mergeContinuous(boolean mergeContiguous)
{
this.mergeContiguous = mergeContiguous;
return this;
}
public RestRequestHighlightModelBuilder usePhraseHighlighter(boolean usePhraseHighlighter)
{
this.usePhraseHighlighter = usePhraseHighlighter;
return this;
}
public RestRequestHighlightModelBuilder fields(List<String> fields)
{
this.fields = fields.stream().map(RestRequestFieldsModel::of).toList();
return this;
}
public RestRequestHighlightModelBuilder fields(RestRequestFieldsModel... fields)
{
this.fields = Arrays.stream(fields).toList();
return this;
}
public RestRequestHighlightModel build()
{
RestRequestHighlightModel highlightModel = new RestRequestHighlightModel();
highlightModel.setPrefix(prefix);
highlightModel.setPostfix(postfix);
highlightModel.setSnippetCount(snippetCount);
highlightModel.setFragmentSize(fragmentSize);
highlightModel.setMaxAnalyzedChars(maxAnalyzedChars);
highlightModel.setMergeContiguous(mergeContiguous);
highlightModel.setUsePhraseHighlighter(usePhraseHighlighter);
highlightModel.setFields(fields);
return highlightModel;
}
}
}
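A hedged sketch combining the two builders; field names are illustrative. The fields(List&lt;String&gt;) overload wraps each name via RestRequestFieldsModel.of, while the varargs overload accepts fully configured field models:

RestRequestHighlightModel highlight = RestRequestHighlightModel.builder()
        .prefix("(").postfix(")")
        .snippetCount(1)
        .fragmentSize(120)
        .usePhraseHighlighter(true)
        .fields(RestRequestFieldsModel.of("cm:name", "[", "]"),
                RestRequestFieldsModel.of("cm:description"))
        .build();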


@@ -60,7 +60,7 @@ public class SearchScoreModel extends TestModel implements IRestModel<SearchScor
@JsonProperty(required = true)
private float score;
private List<ResponseHighLightModel> highlight;
private List<ResponseHighlightModel> highlight;
public float getScore()
{
@@ -78,12 +78,12 @@ public class SearchScoreModel extends TestModel implements IRestModel<SearchScor
return this;
}
public List<ResponseHighLightModel> getHighlight()
public List<ResponseHighlightModel> getHighlight()
{
return highlight;
}
public void setHighlight(List<ResponseHighLightModel> highlight)
public void setHighlight(List<ResponseHighlightModel> highlight)
{
this.highlight = highlight;
}


@@ -125,11 +125,10 @@ public class NodesParentChildrenTests extends RestTest
RestNodeChildAssociationModel childAssoc1 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f1").getId(), "cm:contains");
RestNodeChildAssociationModel childAssoc2 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f2").getId(), "cm:contains");
RestNodeChildAssociationModel childAssoc3 = new RestNodeChildAssociationModel(nodesBuilder.getNode("f3").getId(), "cm:preferenceImage");
String secondaryChildrenBody = "[" + childAssoc1.toJson() + "," + childAssoc2.toJson() + "," + childAssoc3.toJson() + "]";
STEP("3. Create secondary child associations using POST /nodes/{nodeId}/secondary-children");
RestNodeChildAssocModelCollection secondaryChildAssoc = restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel())
.createSecondaryChildren(secondaryChildrenBody);
.addSecondaryChildren(childAssoc1, childAssoc2, childAssoc3);
restClient.assertStatusCodeIs(HttpStatus.CREATED);
secondaryChildAssoc.getEntryByIndex(0).assertThat().field("childId").is(childAssoc1.getChildId());
secondaryChildAssoc.getEntryByIndex(1).assertThat().field("childId").is(childAssoc2.getChildId());
@@ -142,7 +141,7 @@ public class NodesParentChildrenTests extends RestTest
secondaryChildren.assertThat().entriesListCountIs(2);
STEP("5. Check using DELETE /nodes/{nodeId}/secondary-children/{childId} that a secondary child can be deleted");
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).deleteSecondaryChild(secondaryChildren.getEntryByIndex(0));
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).removeSecondaryChild(secondaryChildren.getEntryByIndex(0));
restClient.assertStatusCodeIs(HttpStatus.NO_CONTENT);
STEP("6. Check using GET /nodes/{nodeId}/secondary-children that a secondary child association was deleted");
@@ -182,7 +181,7 @@ public class NodesParentChildrenTests extends RestTest
STEP("2. Create secondary child associations using POST /nodes/{nodeId}/secondary-children");
RestNodeChildAssociationModel childAssoc = new RestNodeChildAssociationModel(nodesBuilder.getNode("f1").getId(), "cm:contains");
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).createSecondaryChildren(childAssoc.toJson());
restClient.withCoreAPI().usingNode(nodesBuilder.getNode("F1").toContentModel()).addSecondaryChild(childAssoc);
restClient.assertStatusCodeIs(HttpStatus.CREATED);
STEP("3. Get all parents for file 'f1' - both primary and secondary");


@@ -11,6 +11,7 @@ import org.alfresco.utility.testrail.ExecutionType;
import org.alfresco.utility.testrail.annotation.TestRail;
import org.springframework.http.HttpStatus;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Ignore;
import org.testng.annotations.Test;
import java.util.List;
@@ -47,8 +48,9 @@ public class GetProcessesCoreTests extends RestTest
@TestRail(section = { TestGroup.REST_API, TestGroup.WORKFLOW,TestGroup.PROCESSES }, executionType = ExecutionType.REGRESSION,
description = "Verify user gets all processes started by him ordered descending by id")
@Test(groups = { TestGroup.REST_API, TestGroup.WORKFLOW, TestGroup.PROCESSES, TestGroup.REGRESSION })
public void getProcessesOrderedByIdDESC() throws Exception
@Test(groups = { TestGroup.REST_API, TestGroup.WORKFLOW, TestGroup.PROCESSES, TestGroup.REGRESSION }, enabled = false)
@Ignore("Until ACS-6234 is done")
public void getProcessesOrderedByIdDESC()
{
RestProcessModelsCollection processes = restClient.authenticateUser(userWhoStartsTask).withParams("orderBy=id DESC")
.withWorkflowAPI().getProcesses();


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<developers>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<properties>

pom.xml

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -51,37 +51,37 @@
<dependency.alfresco-server-root.version>7.0.1</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-service.version>3.0.1-A2</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>4.0.1-A3</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-core.version>5.0.0</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.0.0</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.0</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.23</dependency.acs-event-model.version>
<dependency.acs-event-model.version>0.0.24</dependency.acs-event-model.version>
<dependency.spring.version>6.0.9</dependency.spring.version>
<dependency.aspectj.version>1.9.20.1</dependency.aspectj.version>
<dependency.spring.version>6.0.12</dependency.spring.version>
<dependency.spring-security.version>6.1.4</dependency.spring-security.version>
<dependency.antlr.version>3.5.3</dependency.antlr.version>
<dependency.jackson.version>2.15.2</dependency.jackson.version>
<dependency.cxf.version>4.0.2</dependency.cxf.version>
<dependency.opencmis.version>1.0.0-jakarta-1</dependency.opencmis.version>
<dependency.webscripts.version>8.46</dependency.webscripts.version>
<dependency.webscripts.version>9.0</dependency.webscripts.version>
<dependency.bouncycastle.version>1.76</dependency.bouncycastle.version>
<dependency.mockito-core.version>5.4.0</dependency.mockito-core.version>
<dependency.assertj.version>3.24.2</dependency.assertj.version>
<dependency.org-json.version>20230618</dependency.org-json.version>
<dependency.org-json.version>20231013</dependency.org-json.version>
<dependency.commons-dbcp.version>2.9.0</dependency.commons-dbcp.version>
<dependency.commons-io.version>2.13.0</dependency.commons-io.version>
<dependency.gson.version>2.8.9</dependency.gson.version>
<dependency.commons-io.version>2.14.0</dependency.commons-io.version>
<dependency.gson.version>2.10.1</dependency.gson.version>
<dependency.guava.version>32.1.2-jre</dependency.guava.version>
<dependency.httpclient.version>4.5.14</dependency.httpclient.version>
<dependency.httpcore.version>4.4.16</dependency.httpcore.version>
<dependency.httpcomponents-httpclient5.version>5.2.1</dependency.httpcomponents-httpclient5.version>
<dependency.httpcomponents-httpcore5.version>5.2.2</dependency.httpcomponents-httpcore5.version>
<dependency.httpcomponents-httpcore5.version>5.2.3</dependency.httpcomponents-httpcore5.version>
<dependency.commons-httpclient.version>3.1-HTTPCLIENT-1265</dependency.commons-httpclient.version>
<dependency.xercesImpl.version>2.12.2</dependency.xercesImpl.version>
<dependency.slf4j.version>2.0.7</dependency.slf4j.version>
<dependency.slf4j.version>2.0.9</dependency.slf4j.version>
<dependency.log4j.version>2.20.0</dependency.log4j.version>
<dependency.gytheio.version>0.20.0-A1</dependency.gytheio.version>
<dependency.groovy.version>3.0.19</dependency.groovy.version>
<dependency.tika.version>2.4.1</dependency.tika.version>
<dependency.spring-security.version>6.1.3</dependency.spring-security.version>
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>5.2.2</dependency.poi.version>
<dependency.poi-ooxml-lite.version>5.2.3</dependency.poi-ooxml-lite.version>
@@ -89,19 +89,17 @@
<dependency.camel.version>4.0.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.96.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.2</dependency.activemq.version>
<dependency.apache-compress.version>1.23.0</dependency.apache-compress.version>
<dependency.apache.taglibs.version>1.2.5</dependency.apache.taglibs.version>
<dependency.apache-compress.version>1.24.0</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.0</dependency.awaitility.version>
<dependency.swagger-ui.version>3.38.0</dependency.swagger-ui.version>
<dependency.swagger-parser.version>1.0.67</dependency.swagger-parser.version>
<dependency.maven-filtering.version>3.1.1</dependency.maven-filtering.version>
<dependency.maven-artifact.version>3.8.6</dependency.maven-artifact.version>
<dependency.jdom2.version>2.0.6.1</dependency.jdom2.version>
<dependency.pooled-jms.version>3.1.1</dependency.pooled-jms.version>
<dependency.pooled-jms.version>3.1.2</dependency.pooled-jms.version>
<dependency.jakarta-ee-jaxb-api.version>4.0.0</dependency.jakarta-ee-jaxb-api.version>
<dependency.jakarta-ee-jaxb-impl.version>4.0.3</dependency.jakarta-ee-jaxb-impl.version>
<dependency.java-ee-jaxb-api.version>2.3.3</dependency.java-ee-jaxb-api.version>
<dependency.jakarta-ws-api.version>3.0.1</dependency.jakarta-ws-api.version>
<dependency.jakarta-soap-api.version>2.0.1</dependency.jakarta-soap-api.version>
<dependency.jakarta-annotation-api.version>2.1.1</dependency.jakarta-annotation-api.version>
@@ -115,11 +113,9 @@
<dependency.jakarta-ee-json-impl.version>1.1.4</dependency.jakarta-ee-json-impl.version>
<dependency.jakarta-json-path.version>2.8.0</dependency.jakarta-json-path.version>
<dependency.json-smart.version>2.5.0</dependency.json-smart.version>
<dependency.jakarta-rpc-api.version>1.1.4</dependency.jakarta-rpc-api.version>
<alfresco.googledrive.version>3.4.2-A5</alfresco.googledrive.version>
<alfresco.aos-module.version>1.6.2-A2</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.1.0-A1</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.googledrive.version>4.0.0</alfresco.googledrive.version>
<alfresco.aos-module.version>2.0.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.1.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
<license-maven-plugin.version>2.0.1</license-maven-plugin.version>
@@ -129,10 +125,10 @@
<dependency.mysql-image.version>8</dependency.mysql-image.version>
<dependency.mariadb.version>2.7.4</dependency.mariadb.version>
<dependency.tas-utility.version>5.0.0</dependency.tas-utility.version>
<dependency.rest-assured.version>5.3.1</dependency.rest-assured.version>
<dependency.rest-assured.version>5.3.2</dependency.rest-assured.version>
<dependency.tas-email.version>2.0.0</dependency.tas-email.version>
<dependency.tas-webdav.version>1.20</dependency.tas-webdav.version>
<dependency.tas-ftp.version>1.18</dependency.tas-ftp.version>
<dependency.tas-webdav.version>1.21</dependency.tas-webdav.version>
<dependency.tas-ftp.version>1.19</dependency.tas-ftp.version>
<dependency.tas-dataprep.version>2.6</dependency.tas-dataprep.version>
<!-- AGS properties shared between community and enterprise -->
@@ -156,7 +152,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>23.1.0.206</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>
@@ -219,12 +215,6 @@
<version>${dependency.jakarta-jws-api.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.sun.mail</groupId>-->
<!-- <artifactId>javax.mail</artifactId>-->
<!-- <version>${dependency.java-ee-mail.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>jakarta.mail</artifactId>
@@ -287,17 +277,6 @@
<version>${dependency.json-smart.version}</version>
</dependency>
<dependency>
<groupId>jakarta.xml.rpc</groupId>
<artifactId>jakarta.xml.rpc-api</artifactId>
<version>${dependency.jakarta-rpc-api.version}</version>
</dependency>
<dependency>
<groupId>com.sun.xml.rpc</groupId>
<artifactId>jaxrpc-impl</artifactId>
<version>${dependency.jakarta-rpc-api.version}</version>
</dependency>
<!--CMIS-->
<dependency>
<groupId>org.apache.chemistry.opencmis</groupId>
@@ -594,7 +573,7 @@
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>2.1</version>
<version>2.2</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
@@ -931,21 +910,6 @@
<version>${dependency.camel.version}</version>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-spec</artifactId>
<version>${dependency.apache.taglibs.version}</version>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-impl</artifactId>
<version>${dependency.apache.taglibs.version}</version>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-jstlel</artifactId>
<version>${dependency.apache.taglibs.version}</version>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
@@ -960,7 +924,7 @@
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.28</version>
<version>1.18.30</version>
<scope>provided</scope>
</dependency>
<dependency>
@@ -996,6 +960,16 @@
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.11.0</version>
<configuration>
<compilerArgs>
<arg>-parameters</arg>
</compilerArgs>
</configuration>
</plugin>
<!-- Ensure consistent maven-release-plugin version-->
<plugin>
<artifactId>maven-release-plugin</artifactId>
@@ -1007,7 +981,7 @@
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.43.0</version>
<version>0.43.4</version>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
@@ -1064,7 +1038,7 @@
<configuration>
<failOnMissing>true</failOnMissing>
<excludedScopes>provided,test</excludedScopes>
<excludedGroups>^(org\.alfresco|com\.alfresco|org\.activiti|org\.gytheio).*</excludedGroups>
<excludedGroups>^(org\.alfresco|com\.alfresco|org\.activiti).*</excludedGroups>
<failIfWarning>true</failIfWarning>
<includedLicenses>
https://raw.githubusercontent.com/Alfresco/third-party-license-overrides/master/includedLicenses.txt
@@ -1163,4 +1137,4 @@
</plugin>
</plugins>
</build>
</project>
</project>
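The new maven-compiler-plugin entry passes -parameters, which keeps method and constructor parameter names in the compiled classes (the aspectj-maven-plugin change further down adds the same flag to the AspectJ compilation). A self-contained sketch of what the flag enables, using a hypothetical class:

import java.lang.reflect.Method;

public class ParameterNamesDemo
{
    public void rename(String nodeId, String newName) {}

    public static void main(String[] args) throws Exception
    {
        Method method = ParameterNamesDemo.class.getMethod("rename", String.class, String.class);
        // compiled with -parameters this prints "nodeId"; without it, the synthetic name "arg0"
        System.out.println(method.getParameters()[0].getName());
    }
}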


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>
@@ -18,11 +18,6 @@
</dependency>
<!-- Jakarta... -->
<dependency>
<groupId>jakarta.xml.rpc</groupId>
<artifactId>jakarta.xml.rpc-api</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.1.0.206</version>
<version>23.1.0.256-SNAPSHOT</version>
</parent>
<dependencies>
@@ -560,21 +560,6 @@
<artifactId>alfresco-sync-events</artifactId>
<version>1.2.14</version>
</dependency>
<dependency>
<groupId>org.gytheio</groupId>
<artifactId>gytheio-messaging-camel</artifactId>
<version>${dependency.gytheio.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.camel</groupId>
<artifactId>camel-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.camel</groupId>
<artifactId>camel-jackson</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-core</artifactId>
@@ -726,7 +711,7 @@
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.9.19</version>
<version>${dependency.aspectj.version}</version>
</dependency>
<dependency>
<groupId>commons-net</groupId>
@@ -746,18 +731,6 @@
<version>${dependency.opencmis.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-spec</artifactId>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-impl</artifactId>
</dependency>
<dependency>
<groupId>org.apache.taglibs</groupId>
<artifactId>taglibs-standard-jstlel</artifactId>
</dependency>
<!-- Repo Event Model-->
<dependency>
<groupId>org.alfresco</groupId>
@@ -870,9 +843,9 @@
</executions>
</plugin>
<plugin>
<groupId>com.github.m50d</groupId>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.11.1</version>
<version>1.14.0</version>
<executions>
<execution>
<!-- phase>process-sources</phase -->
@@ -889,7 +862,17 @@
<showWeaveInfo>true</showWeaveInfo>
<source>1.8</source>
<target>1.8</target>
<additionalCompilerArgs>
<arg>-parameters</arg>
</additionalCompilerArgs>
</configuration>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjtools</artifactId>
<version>${dependency.aspectj.version}</version>
</dependency>
</dependencies>
</plugin>
<plugin>


@@ -196,7 +196,7 @@ public class HierarchicalSqlSessionFactoryBean extends SqlSessionFactoryBean
* @param plugins list of plugins
*
*/
public void setPlugins(Interceptor[] plugins) {
public void setPlugins(Interceptor... plugins) {
this.plugins = plugins;
}
@@ -244,7 +244,7 @@ public class HierarchicalSqlSessionFactoryBean extends SqlSessionFactoryBean
*
* @param typeHandlers Type handler list
*/
public void setTypeHandlers(TypeHandler<?>[] typeHandlers) {
public void setTypeHandlers(TypeHandler<?>... typeHandlers) {
this.typeHandlers = typeHandlers;
}
@@ -255,7 +255,7 @@ public class HierarchicalSqlSessionFactoryBean extends SqlSessionFactoryBean
*
* @param typeAliases Type aliases list
*/
public void setTypeAliases(Class<?>[] typeAliases) {
public void setTypeAliases(Class<?>... typeAliases) {
this.typeAliases = typeAliases;
}
@@ -288,7 +288,7 @@ public class HierarchicalSqlSessionFactoryBean extends SqlSessionFactoryBean
* This property being based on Spring's resource abstraction also allows for specifying
* resource patterns here: e.g. "classpath*:sqlmap/*-mapper.xml".
*/
public void setMapperLocations(Resource[] mapperLocations) {
public void setMapperLocations(Resource... mapperLocations) {
this.mapperLocations = mapperLocations;
}
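Switching these setters from arrays to varargs keeps existing array-based call sites compiling while allowing arguments to be passed inline. A hedged sketch; in practice the bean is configured by Spring, and the entity classes and the resolveMapperResources() helper are hypothetical:

HierarchicalSqlSessionFactoryBean factoryBean = new HierarchicalSqlSessionFactoryBean();
// arguments can now be listed inline...
factoryBean.setTypeAliases(NodeEntity.class, AclEntity.class);   // hypothetical alias classes
// ...while an already-built array still satisfies the varargs signature
Resource[] mapperResources = resolveMapperResources();           // hypothetical helper returning Resource[]
factoryBean.setMapperLocations(mapperResources);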


@@ -0,0 +1,42 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.messaging;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LoggingDeadLetterQueue
{
private static final Logger LOG = LoggerFactory.getLogger(LoggingDeadLetterQueue.class);
public void onReceive(Object message)
{
if (message != null)
{
LOG.debug("Received:\n\n{}\n\n", message);
}
}
}
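The class depends only on SLF4J, so it can be exercised directly; in the repository it is presumably wired as the consumer bean of a dead-letter endpoint. A minimal sketch:

LoggingDeadLetterQueue deadLetterQueue = new LoggingDeadLetterQueue();
// non-null payloads are logged at DEBUG level; null messages are ignored
deadLetterQueue.onReceive("{\"id\":\"illustrative-payload\"}");
deadLetterQueue.onReceive(null);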


@@ -0,0 +1,93 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.messaging.jackson;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serial;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
public class ObjectMapperFactory
{
private ObjectMapperFactory()
{
//no instantiation
}
public static ObjectMapper createInstance()
{
QpidJsonBodyCleanerObjectMapper mapper = new QpidJsonBodyCleanerObjectMapper();
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
return mapper;
}
private static class QpidJsonBodyCleanerObjectMapper extends ObjectMapper
{
@Serial
private static final long serialVersionUID = 2568701685293341501L;
private static final String DEFAULT_ENCODING = "utf8";
public <T> T readValue(InputStream inputStream, Class<T> valueType) throws IOException
{
try
{
// Try to unmarshal normally
if (inputStream.markSupported())
{
inputStream.mark(1024 * 512);
}
return super.readValue(inputStream, valueType);
}
catch (JsonParseException e)
{
if (!inputStream.markSupported())
{
// We can't reset this stream, bail out
throw e;
}
// Reset the stream
inputStream.reset();
}
// Clean the message body and try again
StringWriter writer = new StringWriter();
IOUtils.copy(inputStream, writer, DEFAULT_ENCODING);
String content = writer.toString();
content = content.substring(content.indexOf('{'));
return readValue(content, valueType);
}
}
}
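A hedged usage sketch: the returned mapper behaves like a regular ObjectMapper, except that readValue(InputStream, Class) retries once after stripping any non-JSON preamble before the first '{' (for example an AMQP/Qpid header) on streams that support mark/reset. MyEvent and messageBody are hypothetical; real payloads written by the same mapper carry the type information that the enabled default typing expects.

ObjectMapper mapper = ObjectMapperFactory.createInstance();
// unknown properties and empty beans no longer fail hard; parse errors on mark-supporting
// streams trigger one retry that skips everything before the first '{'
MyEvent event = mapper.readValue(messageBody, MyEvent.class);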


@@ -1,28 +1,28 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.action.executer;
import java.io.Serializable;
@@ -34,10 +34,10 @@ import org.alfresco.repo.action.ParameterDefinitionImpl;
import org.alfresco.repo.admin.SysAdminParams;
import org.alfresco.repo.jscript.ScriptAction;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.action.ParameterConstraint;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.action.ParameterConstraint;
import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -129,10 +129,10 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
if (nodeService.exists(actionedUponNodeRef))
{
NodeRef scriptRef = (NodeRef)action.getParameterValue(PARAM_SCRIPTREF);
if(!isValidScriptRef(action))
{
throw new IllegalStateException("Invalid script ref path: " + scriptRef);
}
if(!isValidScriptRef(action))
{
throw new IllegalStateException("Invalid script ref path: " + scriptRef);
}
NodeRef spaceRef = this.serviceRegistry.getRuleService().getOwningNodeRef(action);
if (spaceRef == null)
{
@@ -229,19 +229,22 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
return companyHomeRef;
}
private boolean isValidScriptRef(Action action)
{
NodeRef scriptRef = (NodeRef) action.getParameterValue(PARAM_SCRIPTREF);
ActionService actionService = this.serviceRegistry.getActionService();
ActionDefinition actDef = actionService.getActionDefinition(action.getActionDefinitionName());
ParameterDefinition parameterDef = actDef.getParameterDefintion(PARAM_SCRIPTREF);
String paramConstraintName = parameterDef.getParameterConstraintName();
if (paramConstraintName != null)
{
ParameterConstraint paramConstraint = actionService.getParameterConstraint(paramConstraintName);
return paramConstraint.isValidValue(scriptRef.toString());
}
return true;
}
private boolean isValidScriptRef(Action action)
{
NodeRef scriptRef = (NodeRef) action.getParameterValue(PARAM_SCRIPTREF);
ActionService actionService = this.serviceRegistry.getActionService();
ActionDefinition actDef = actionService.getActionDefinition(action.getActionDefinitionName());
ParameterDefinition parameterDef = actDef.getParameterDefintion(PARAM_SCRIPTREF);
if (parameterDef != null)
{
String paramConstraintName = parameterDef.getParameterConstraintName();
if (paramConstraintName != null)
{
ParameterConstraint paramConstraint = actionService.getParameterConstraint(paramConstraintName);
return paramConstraint.isValidValue(scriptRef.toString());
}
}
return true;
}
}


@@ -108,7 +108,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
// --DELETE_NOT_EXISTS primaryTable.key,secondaryTable1.key1,... batch.size.property
String[] args = sql.split("[ \\t]+(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)");
if (args.length == 3 && (args[1].indexOf('.')) != -1)
if (args.length >= 3 && args[1].indexOf('.') != -1)
{
String[] tableColumnArgs = args[1].split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)");
if (tableColumnArgs.length >= 2)
@@ -133,6 +133,10 @@ public class DeleteNotExistsExecutor implements StatementExecutor
String timeoutSecString = globalProperties.getProperty(PROPERTY_TIMEOUT_SECONDS);
timeoutSec = timeoutSecString == null ? -1 : Long.parseLong(timeoutSecString);
// Only implemented in v3. In v2 the skip is not used
String skipToIdString = (args.length == 4) ? globalProperties.getProperty(args[3]) : null;
Long skipToId = skipToIdString == null ? 0L : Long.parseLong(skipToIdString);
// Compute upper limits
Long[] tableUpperLimits = new Long[tableColumnArgs.length];
Pair<String, String>[] tableColumn = new Pair[tableColumnArgs.length];
@@ -159,7 +163,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}
process(tableColumn, tableUpperLimits, optionalWhereClauses);
process(tableColumn, tableUpperLimits, optionalWhereClauses, skipToId);
}
}
}
@@ -623,4 +627,10 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}
}
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses,
Long skipToId) throws SQLException
{
process(tableColumn, tableUpperLimits, optionalWhereClauses);
}
}
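Taken together, the script line may now carry an optional fourth token naming a global property that holds the id to resume from; the base executor parses it but its new four-argument process(...) simply delegates to the old three-argument one, so only subclasses that override it (DeleteNotExistsV3Executor below) honour the skip. A hedged fragment with illustrative table and property names:

String sql = "--DELETE_NOT_EXISTS primary_table.id,secondary_table.fk_id batch.size.property system.delete_not_exists.skipToId";
java.util.Properties globalProperties = new java.util.Properties();
globalProperties.setProperty("system.delete_not_exists.skipToId", "150000");
String[] args = sql.split("[ \\t]+");  // simplified split; the executor's version also respects quoted strings
String skipToIdString = (args.length == 4) ? globalProperties.getProperty(args[3]) : null;
Long skipToId = skipToIdString == null ? 0L : Long.parseLong(skipToIdString);  // 150000 here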


@@ -108,12 +108,19 @@ public class DeleteNotExistsV3Executor extends DeleteNotExistsExecutor
@Override
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses)
throws SQLException
{
process(tableColumn, tableUpperLimits, optionalWhereClauses, 0L);
}
@Override
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses, Long skipToId)
throws SQLException
{
String primaryTableName = tableColumn[0].getFirst();
String primaryColumnName = tableColumn[0].getSecond();
String primaryWhereClause = optionalWhereClauses[0];
Long primaryId = 0L;
Long primaryId = skipToId;
deletedCount = 0L;
startTime = new Date();


@@ -311,12 +311,24 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
public void onCreateChildAssociation(ChildAssociationRef childAssociationRef, boolean isNewNode)
{
getEventConsolidator(childAssociationRef).onCreateChildAssociation(childAssociationRef, isNewNode);
if (!childAssociationRef.isPrimary())
{
// if this is a secondary relationship simulate node move event to store state of previous secondary parents
ChildAssociationRef oldChildAssociationRef = childAssociationWithoutParentOf(childAssociationRef);
getEventConsolidator(childAssociationRef.getChildRef()).onMoveNode(oldChildAssociationRef, childAssociationRef);
}
}
@Override
public void beforeDeleteChildAssociation(ChildAssociationRef childAssociationRef)
{
getEventConsolidator(childAssociationRef).beforeDeleteChildAssociation(childAssociationRef);
if (!childAssociationRef.isPrimary())
{
// if this is a secondary relationship simulate node move event to store state of previous secondary parents
ChildAssociationRef newChildAssociationRef = childAssociationWithoutParentOf(childAssociationRef);
getEventConsolidator(childAssociationRef.getChildRef()).onMoveNode(childAssociationRef, newChildAssociationRef);
}
}
@Override
@@ -503,6 +515,18 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
return ZonedDateTime.ofInstant(commitTimeMs, ZoneOffset.UTC);
}
private static ChildAssociationRef childAssociationWithoutParentOf(ChildAssociationRef childAssociationRef)
{
return new ChildAssociationRef(
null,
null,
childAssociationRef.getQName(),
childAssociationRef.getChildRef(),
childAssociationRef.isPrimary(),
childAssociationRef.getNthSibling()
);
}
@Override
protected void onBootstrap(ApplicationEvent applicationEvent)
{


@@ -65,6 +65,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
private QName nodeType;
private QName nodeTypeBefore;
private List<String> primaryHierarchyBefore;
private List<String> secondaryParentsBefore;
private boolean resourceBeforeAllFieldsNull = true;
public NodeEventConsolidator(NodeResourceHelper nodeResourceHelper)
@@ -144,7 +145,25 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
eventTypes.add(EventType.NODE_UPDATED);
createBuilderIfAbsent(newChildAssocRef.getChildRef());
setBeforePrimaryHierarchy(helper.getPrimaryHierarchy(oldChildAssocRef.getParentRef(), true));
if (newChildAssocRef.isPrimary())
{
setBeforePrimaryHierarchy(helper.getPrimaryHierarchy(oldChildAssocRef.getParentRef(), true));
}
else
{
List<String> secondaryParents = helper.getSecondaryParents(newChildAssocRef.getChildRef());
if (newChildAssocRef.getParentRef() != null)
{
// on create secondary child association event takes place - recreate secondary parents previous state
secondaryParents.remove(newChildAssocRef.getParentRef().getId());
}
else if(oldChildAssocRef.getParentRef() != null && !secondaryParents.contains(oldChildAssocRef.getParentRef().getId()))
{
// before remove secondary child association event takes place - recreate secondary parents previous state
secondaryParents.add(oldChildAssocRef.getParentRef().getId());
}
setSecondaryParentsBefore(secondaryParents);
}
}
@Override
@@ -174,7 +193,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
public void beforeDeleteNode(NodeRef nodeRef)
{
eventTypes.add(EventType.NODE_DELETED);
createBuilderIfAbsent(nodeRef, false);
createBuilderIfAbsent(nodeRef);
}
@Override
@@ -240,6 +259,19 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
}
}
private void setSecondaryParentsBefore(List<String> secondaryParents)
{
if (this.secondaryParentsBefore == null)
{
this.secondaryParentsBefore = secondaryParents;
}
}
List<String> getSecondaryParentsBefore()
{
return secondaryParentsBefore;
}
private NodeResource buildNodeResource()
{
if (resourceBuilder == null)
@@ -283,7 +315,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
resourceBeforeAllFieldsNull = false;
}
Map<String, Map<String, String>> localizedProps =helper.getLocalizedPropertiesBefore(changedPropsBefore, after);
Map<String, Map<String, String>> localizedProps = helper.getLocalizedPropertiesBefore(changedPropsBefore, after);
if (!localizedProps.isEmpty())
{
builder.setLocalizedProperties(localizedProps);
@@ -309,8 +341,7 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
builder.setModifiedByUser(modifier);
resourceBeforeAllFieldsNull = false;
}
modifiedAt =
helper.getZonedDateTime((Date) changedPropsBefore.get(ContentModel.PROP_MODIFIED));
modifiedAt = helper.getZonedDateTime((Date) changedPropsBefore.get(ContentModel.PROP_MODIFIED));
}
// Handle case where the content does not exist on the propertiesBefore
@@ -334,6 +365,12 @@ public class NodeEventConsolidator extends EventConsolidator<NodeRef, NodeResour
resourceBeforeAllFieldsNull = false;
}
if (secondaryParentsBefore != null)
{
builder.setSecondaryParents(secondaryParentsBefore);
resourceBeforeAllFieldsNull = false;
}
if (nodeTypeBefore != null)
{
builder.setNodeType(helper.getQNamePrefixString(nodeTypeBefore));

View File

@@ -26,6 +26,7 @@
package org.alfresco.repo.event2;
import static java.util.Optional.ofNullable;
import static java.util.function.Predicate.not;
import java.io.Serializable;
import java.time.ZoneId;
@@ -38,6 +39,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
@@ -145,21 +147,23 @@ public class NodeResourceHelper implements InitializingBean
// minor: save one lookup if creator & modifier are the same
Map<String, UserInfo> mapUserCache = new HashMap<>(2);
return NodeResource.builder().setId(nodeRef.getId())
.setName((String) properties.get(ContentModel.PROP_NAME))
.setNodeType(getQNamePrefixString(type))
.setIsFile(isSubClass(type, ContentModel.TYPE_CONTENT))
.setIsFolder(isSubClass(type, ContentModel.TYPE_FOLDER))
.setCreatedByUser(getUserInfo((String) properties.get(ContentModel.PROP_CREATOR), mapUserCache))
.setCreatedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_CREATED)))
.setModifiedByUser(getUserInfo((String) properties.get(ContentModel.PROP_MODIFIER), mapUserCache))
.setModifiedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_MODIFIED)))
.setContent(getContentInfo(properties))
.setPrimaryAssocQName(getPrimaryAssocQName(nodeRef))
.setPrimaryHierarchy(PathUtil.getNodeIdsInReverse(path, false))
.setProperties(mapToNodeProperties(properties))
.setLocalizedProperties(mapToNodeLocalizedProperties(properties))
.setAspectNames(getMappedAspects(nodeRef));
return NodeResource.builder()
.setId(nodeRef.getId())
.setName((String) properties.get(ContentModel.PROP_NAME))
.setNodeType(getQNamePrefixString(type))
.setIsFile(isSubClass(type, ContentModel.TYPE_CONTENT))
.setIsFolder(isSubClass(type, ContentModel.TYPE_FOLDER))
.setCreatedByUser(getUserInfo((String) properties.get(ContentModel.PROP_CREATOR), mapUserCache))
.setCreatedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_CREATED)))
.setModifiedByUser(getUserInfo((String) properties.get(ContentModel.PROP_MODIFIER), mapUserCache))
.setModifiedAt(getZonedDateTime((Date)properties.get(ContentModel.PROP_MODIFIED)))
.setContent(getContentInfo(properties))
.setPrimaryAssocQName(getPrimaryAssocQName(nodeRef))
.setPrimaryHierarchy(PathUtil.getNodeIdsInReverse(path, false))
.setProperties(mapToNodeProperties(properties))
.setLocalizedProperties(mapToNodeLocalizedProperties(properties))
.setAspectNames(getMappedAspects(nodeRef))
.setSecondaryParents(getSecondaryParents(nodeRef));
}
private boolean isSubClass(QName className, QName ofClassQName)
@@ -413,6 +417,21 @@ public class NodeResourceHelper implements InitializingBean
return PathUtil.getNodeIdsInReverse(path, showLeaf);
}
/**
* Gathers node's secondary parents.
*
* @param nodeRef - node reference
* @return a list of node's secondary parents.
*/
public List<String> getSecondaryParents(final NodeRef nodeRef)
{
return nodeService.getParentAssocs(nodeRef).stream()
.filter(not(ChildAssociationRef::isPrimary))
.map(ChildAssociationRef::getParentRef)
.map(NodeRef::getId)
.collect(Collectors.toList());
}
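/*
 * Illustrative usage (not part of this change set), assuming a node filed under one primary and two
 * secondary parents; the identifiers are hypothetical:
 *
 *   List<String> secondaryParents = nodeResourceHelper.getSecondaryParents(childRef);
 *   // -> ["<parent2-id>", "<parent3-id>"] - the primary parent is filtered out and only the ids of the
 *   //    non-primary parent associations are returned, in the order reported by the NodeService.
 */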
public PermissionService getPermissionService()
{
return permissionService;

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -48,19 +48,31 @@ public class NodePropertyFilter extends AbstractNodeEventFilter
ContentModel.PROP_CREATOR,
ContentModel.PROP_CREATED,
ContentModel.PROP_CONTENT);
// These properties should not be excluded from the properties object
private static final Set<QName> ALLOWED_PROPERTIES = Set.of(ContentModel.PROP_CASCADE_TX,
ContentModel.PROP_CASCADE_CRC);
private final List<String> nodeAspectsBlackList;
private final List<String> nodePropertiesBlackList;
public NodePropertyFilter()
{
this.nodeAspectsBlackList = parseFilterList(FILTERED_PROPERTIES);
this.nodePropertiesBlackList = parseFilterList(FILTERED_PROPERTIES);
}
@Override
public Set<QName> getExcludedTypes()
{
Set<QName> result = new HashSet<>(EXCLUDED_TOP_LEVEL_PROPS);
nodeAspectsBlackList.forEach(nodeAspect -> result.addAll(expandTypeDef(nodeAspect)));
nodePropertiesBlackList.forEach(nodeProperty -> result.addAll(expandTypeDef(nodeProperty)));
return result;
}
@Override
public boolean isExcluded(QName qName)
{
if (qName != null && ALLOWED_PROPERTIES.contains(qName))
{
return false;
}
return super.isExcluded(qName);
}
}
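/*
 * Illustrative behaviour (not part of this change set): the override above short-circuits for the two
 * whitelisted properties, so they are never reported as excluded regardless of the configured property
 * filters, while everything else still goes through the inherited check. "someOtherProperty" below is
 * a placeholder.
 *
 *   NodePropertyFilter filter = new NodePropertyFilter();
 *   filter.isExcluded(ContentModel.PROP_CASCADE_TX);   // false - explicitly allowed
 *   filter.isExcluded(ContentModel.PROP_CASCADE_CRC);  // false - explicitly allowed
 *   filter.isExcluded(someOtherProperty);              // delegated to super.isExcluded(...)
 */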

View File

@@ -74,8 +74,6 @@ import org.alfresco.util.transaction.TransactionSupportUtil;
import org.apache.chemistry.opencmis.commons.server.CallContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.gytheio.messaging.MessageProducer;
import org.gytheio.messaging.MessagingException;
import com.google.common.base.Splitter;
import com.google.common.collect.Sets;
@@ -294,7 +292,7 @@ public abstract class AbstractEventsService extends TransactionListenerAdapter
}
/**
* Filter out event before sending them to {@link org.gytheio.messaging.MessageProducer}
* Filter out events before sending them to the {@link MessageProducer}
*
* @param events the events to be filtered
*

View File

@@ -0,0 +1,57 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.events;
import java.util.Map;
import java.util.Objects;
import org.apache.camel.ProducerTemplate;
class CamelMessageProducer implements MessageProducer
{
private static final Map<String, Object> AMQP_HEADERS = Map.of("JMS_AMQP_MESSAGE_FORMAT", 0L);
private final ProducerTemplate producer;
private final String endpoint;
CamelMessageProducer(ProducerTemplate producer, String endpoint)
{
this.producer = Objects.requireNonNull(producer);
this.endpoint = Objects.requireNonNull(endpoint);
}
@Override
public void send(Object message)
{
try
{
producer.sendBodyAndHeaders(endpoint, message, AMQP_HEADERS);
}
catch (Exception e)
{
throw new MessagingException("Could not send message", e);
}
}
}
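/*
 * Illustrative wiring sketch (not part of this change set), mirroring the Spring bean definition that
 * replaces the old org.gytheio producer; the CamelContext variable is hypothetical:
 *
 *   ProducerTemplate template = camelContext.createProducerTemplate();
 *   MessageProducer eventsDispatcher = new CamelMessageProducer(template, "direct:alfresco.events");
 *   eventsDispatcher.send(event);   // wraps any failure in a MessagingException
 */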

View File

@@ -30,8 +30,6 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.tenant.TenantUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.gytheio.messaging.MessageProducer;
import org.gytheio.messaging.MessagingException;
public class ExceptionEventsServiceImpl extends AbstractEventsService implements ExceptionEventsService
{

View File

@@ -0,0 +1,37 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.events;
public interface MessageProducer
{
/**
* Send the given POJO message to the default queue for the producer
*
* @param message message to send
* @throws MessagingException on failure
*/
void send(Object message) throws MessagingException;
}
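/*
 * Minimal sketch of an alternative implementation (not part of this change set), e.g. for unit tests
 * that only need to capture the outgoing messages; the class name is hypothetical:
 *
 *   class RecordingMessageProducer implements MessageProducer
 *   {
 *       final List<Object> sent = new ArrayList<>();
 *
 *       @Override
 *       public void send(Object message)
 *       {
 *           sent.add(message);
 *       }
 *   }
 */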

View File

@@ -0,0 +1,50 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.events;
import java.io.Serial;
import java.time.LocalDate;
import java.util.concurrent.atomic.AtomicInteger;
public class MessagingException extends RuntimeException
{
@Serial
private static final long serialVersionUID = 8192266871339806688L;
private static final AtomicInteger ERROR_COUNTER = new AtomicInteger();
public MessagingException(String message, Throwable cause)
{
super(buildErrorLogNumber(message), cause);
}
private static String buildErrorLogNumber(String message)
{
final LocalDate today = LocalDate.now();
message = message == null ? "" : message;
return "%02d%02d%04d %s".formatted(today.getMonthValue(), today.getDayOfMonth(), ERROR_COUNTER.getAndIncrement(), message);
}
}
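/*
 * Worked example (illustrative, not part of this change set) of the error-log-number prefix built
 * above: a failure on 31 October with the counter at 0 and the message "Could not send message"
 * produces
 *
 *   "10310000 Could not send message"
 *
 * i.e. two digits for the month, two for the day, a zero-padded four-digit per-JVM counter, then the
 * original message (replaced by an empty string when null).
 */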

View File

@@ -156,7 +156,7 @@ public class ChainingSubsystemProxyFactory extends ProxyFactoryBean
*/
@SuppressWarnings("rawtypes")
@Override
public void setInterfaces(Class[] interfaces)
public void setInterfaces(Class... interfaces)
{
super.setInterfaces(interfaces);
// Make it possible to export the object via JMX

View File

@@ -89,7 +89,7 @@ public class SubsystemProxyFactory extends ProxyFactoryBean implements Applicati
@SuppressWarnings("unchecked")
@Override
public void setInterfaces(Class[] interfaces)
public void setInterfaces(Class... interfaces)
{
super.setInterfaces(interfaces);
// Make it possible to export the object via JMX

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -3203,13 +3203,16 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
// Invoke policy behaviour
invokeBeforeUpdateNode(parentNodeRef);
Map<QName, Serializable> propertiesBefore = nodeDAO.getNodeProperties(parentNodeId);
// Touch the node; it is cm:auditable
boolean changed = nodeDAO.setModifiedProperties(parentNodeId, modifiedDate, modifiedByToPropagate);
if (changed)
{
Map<QName, Serializable> propertiesAfter = nodeDAO.getNodeProperties(parentNodeId);
// Invoke policy behaviour
invokeOnUpdateNode(parentNodeRef);
invokeOnUpdateProperties(parentNodeRef, propertiesBefore, propertiesAfter);
}
return null;

View File

@@ -57,6 +57,7 @@ import com.nimbusds.jose.proc.JWSVerificationKeySelector;
import com.nimbusds.jose.proc.SecurityContext;
import com.nimbusds.jose.util.ResourceRetriever;
import com.nimbusds.jwt.proc.ConfigurableJWTProcessor;
import com.nimbusds.oauth2.sdk.id.Issuer;
import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacade.IdentityServiceFacadeException;
@@ -91,7 +92,9 @@ import org.springframework.security.oauth2.core.ClientAuthenticationMethod;
import org.springframework.security.oauth2.core.DelegatingOAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2AuthenticationException;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2ErrorCodes;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.core.converter.ClaimTypeConverter;
import org.springframework.security.oauth2.core.http.converter.OAuth2AccessTokenResponseHttpMessageConverter;
import org.springframework.security.oauth2.jose.jws.SignatureAlgorithm;
@@ -99,7 +102,6 @@ import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;
import org.springframework.security.oauth2.jwt.JwtClaimValidator;
import org.springframework.security.oauth2.jwt.JwtDecoder;
import org.springframework.security.oauth2.jwt.JwtIssuerValidator;
import org.springframework.security.oauth2.jwt.JwtTimestampValidator;
import org.springframework.security.oauth2.jwt.NimbusJwtDecoder;
import org.springframework.web.client.RestOperations;
@@ -375,12 +377,18 @@ public class IdentityServiceFacadeFactoryBean implements FactoryBean<IdentitySer
.map(OIDCProviderMetadata::getAuthorizationEndpointURI)
.map(URI::toASCIIString)
.orElse(null);
final String issuerUri = Optional.of(metadata)
.map(OIDCProviderMetadata::getIssuer)
.map(Issuer::getValue)
.orElseGet(config::getIssuerUrl);
return ClientRegistration
.withRegistrationId("ids")
.authorizationUri(authUri)
.tokenUri(metadata.getTokenEndpointURI().toASCIIString())
.jwkSetUri(metadata.getJWKSetURI().toASCIIString())
.issuerUri(config.getIssuerUrl())
.issuerUri(issuerUri)
.authorizationGrantType(AuthorizationGrantType.PASSWORD);
}
@@ -565,6 +573,34 @@ public class IdentityServiceFacadeFactoryBean implements FactoryBean<IdentitySer
}
}
static class JwtIssuerValidator implements OAuth2TokenValidator<Jwt>
{
private final String requiredIssuer;
public JwtIssuerValidator(String issuer)
{
this.requiredIssuer = requireNonNull(issuer, "issuer cannot be null");
}
@Override
public OAuth2TokenValidatorResult validate(Jwt token)
{
requireNonNull(token, "token cannot be null");
final Object issuer = token.getClaim(JwtClaimNames.ISS);
if (issuer != null && requiredIssuer.equals(issuer.toString()))
{
return OAuth2TokenValidatorResult.success();
}
final OAuth2Error error = new OAuth2Error(
OAuth2ErrorCodes.INVALID_TOKEN,
"The iss claim is not valid. Expected `%s` but got `%s`.".formatted(requiredIssuer, issuer),
"https://tools.ietf.org/html/rfc6750#section-3.1");
return OAuth2TokenValidatorResult.failure(error);
}
}
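/*
 * Illustrative check (not part of this change set), assuming a Spring Security Jwt built for a test;
 * the issuer URL is hypothetical:
 *
 *   JwtIssuerValidator validator = new JwtIssuerValidator("https://idp.example.com/realms/alfresco");
 *   Jwt jwt = Jwt.withTokenValue("token")
 *                .header("alg", "none")
 *                .claim(JwtClaimNames.ISS, "https://idp.example.com/realms/alfresco")
 *                .build();
 *   validator.validate(jwt).hasErrors();   // false - the iss claim matches the required issuer
 *   // a missing or different iss claim yields an invalid_token OAuth2Error instead
 */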
private static boolean isDefined(String value)
{
return value != null && !value.isBlank();

View File

@@ -1,9 +1,9 @@
--DELETE_NOT_EXISTS_V3 alf_prop_root.id,alf_audit_app.disabled_paths_id,alf_audit_entry.audit_values_id,alf_prop_unique_ctx.prop1_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_root.id,alf_audit_app.disabled_paths_id,alf_audit_entry.audit_values_id,alf_prop_unique_ctx.prop1_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_root.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_value.id,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_value.id,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_string_value.id,alf_prop_value.long_value."persisted_type in (3, 5, 6)",alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_string_value.id,alf_prop_value.long_value."persisted_type in (3, 5, 6)",alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_string_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_serializable_value.id,alf_prop_value.long_value.persisted_type=4,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_serializable_value.id,alf_prop_value.long_value.persisted_type=4,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_serializable_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_double_value.id,alf_prop_value.long_value.persisted_type=2,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_double_value.id,alf_prop_value.long_value.persisted_type=2,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_double_value.skipToId

View File

@@ -1,9 +1,9 @@
--DELETE_NOT_EXISTS_V3 alf_prop_root.id,alf_audit_app.disabled_paths_id,alf_audit_entry.audit_values_id,alf_prop_unique_ctx.prop1_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_root.id,alf_audit_app.disabled_paths_id,alf_audit_entry.audit_values_id,alf_prop_unique_ctx.prop1_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_root.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_value.id,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_value.id,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_string_value.id,alf_prop_value.long_value."persisted_type in (3, 5, 6)",alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_string_value.id,alf_prop_value.long_value."persisted_type in (3, 5, 6)",alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_string_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_serializable_value.id,alf_prop_value.long_value.persisted_type=4,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_serializable_value.id,alf_prop_value.long_value.persisted_type=4,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_serializable_value.skipToId
--DELETE_NOT_EXISTS_V3 alf_prop_double_value.id,alf_prop_value.long_value.persisted_type=2,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize
--DELETE_NOT_EXISTS_V3 alf_prop_double_value.id,alf_prop_value.long_value.persisted_type=2,alf_audit_app.app_name_id,alf_audit_entry.audit_user_id,alf_prop_link.key_prop_id,alf_prop_link.value_prop_id,alf_prop_unique_ctx.value1_prop_id,alf_prop_unique_ctx.value2_prop_id,alf_prop_unique_ctx.value3_prop_id system.delete_not_exists.batchsize system.delete_not_exists.alf_prop_double_value.skipToId

View File

@@ -1221,7 +1221,7 @@ contentPropertyRestrictions.whitelist=
repo.event2.enabled=true
# Type and aspect filters which should be excluded
# Note: System folder node types are added by default
repo.event2.filter.nodeTypes=sys:*, fm:*, cm:thumbnail, cm:failedThumbnail, cm:rating, rma:rmsite include_subtypes
repo.event2.filter.nodeTypes=sys:*, fm:*, cm:thumbnail, cm:failedThumbnail, cm:rating, rma:rmsite include_subtypes, usr:user
repo.event2.filter.nodeAspects=sys:*
repo.event2.filter.childAssocTypes=rn:rendition
# Comma separated list of users which should be excluded
@@ -1231,6 +1231,7 @@ repo.event2.filter.users=
repo.event2.topic.endpoint=amqp:topic:alfresco.repo.event2
# Specifies whether messages should be enqueued in the in-memory queue or sent directly to the topic
repo.event2.queue.skip=false
#repo.event2.topic.endpoint=amqp:topic:VirtualTopic.alfresco.repo.event2
# Thread pool for async enqueue of repo events
repo.event2.queue.enqueueThreadPool.priority=1
repo.event2.queue.enqueueThreadPool.coreSize=8
@@ -1254,6 +1255,12 @@ system.prop_table_cleaner.algorithm=V2
system.delete_not_exists.pauseAndRecoverBatchSize=500000
#Duration of the pause in milliseconds (default 10s)
system.delete_not_exists.pauseAndRecoverTime=10000
#Skip ids lower than the configured value on job execution (0 = no skipping)
system.delete_not_exists.alf_prop_root.skipToId=0
system.delete_not_exists.alf_prop_value.skipToId=0
system.delete_not_exists.alf_prop_string_value.skipToId=0
system.delete_not_exists.alf_prop_serializable_value.skipToId=0
system.delete_not_exists.alf_prop_double_value.skipToId=0
# --Node cleanup batch - default settings
system.node_cleanup.delete_batchSize=1000

View File

@@ -187,7 +187,7 @@
<property name="startDelay" value="${system.cronJob.startDelayMilliseconds}"/>
<property name="jobDetail">
<bean id="upgradePasswordHashJobDetail" class="org.springframework.scheduling.quartz.JobDetailFactoryBean">
<property name="jobClass" value="org.alfresco.repo.admin.patch.AsynchronousPatch$AsynchronousPatchJob"/>
<property name="jobClass" value="org.alfresco.repo.security.authentication.UpgradePasswordHashWorker$UpgradePasswordHashJob"/>
<property name="jobDataAsMap">
<map>
<entry key="upgradePasswordHashWorker" value-ref="upgradePasswordHashWorker"/>

View File

@@ -9,9 +9,9 @@
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="eventsDispatcher" class="org.gytheio.messaging.camel.CamelMessageProducer">
<property name="producer" ref="camelProducerTemplate" />
<property name="endpoint" value="direct:alfresco.events" />
<bean id="eventsDispatcher" class="org.alfresco.repo.events.CamelMessageProducer">
<constructor-arg ref="camelProducerTemplate" />
<constructor-arg value="direct:alfresco.events" />
</bean>
<bean id="eventsRegistry" class="org.alfresco.sync.events.EventRegistryImpl">

View File

@@ -11,7 +11,7 @@
<context:component-scan base-package="org.alfresco.messaging.camel.configuration"/>
<bean id="messagingObjectMapper" class="org.gytheio.messaging.jackson.ObjectMapperFactory"
<bean id="messagingObjectMapper" class="org.alfresco.messaging.jackson.ObjectMapperFactory"
factory-method="createInstance" />
<bean id="defaultDataFormat" class="org.apache.camel.component.jackson.JacksonDataFormat">
@@ -96,6 +96,6 @@
</bean>
<!-- In the default routes, this is where a message goes if no queue is specified -->
<bean id="deadLetterQueue" class="org.gytheio.messaging.LoggingDeadLetterQueue" />
<bean id="deadLetterQueue" class="org.alfresco.messaging.LoggingDeadLetterQueue" />
</beans>

View File

@@ -158,7 +158,7 @@ public class AccessAuditorTest
{
Object[] args = invocation.getArguments();
Map<String, Serializable> auditMap = (Map<String, Serializable>)args[1];
if ("/alfresco-access/transaction".equals(args[0]))
if ("/alfresco-access/transaction".equals(args[0]) && !"updateNodeProperties".equals(auditMap.get("action")))
{
auditMapList.add(auditMap);
}

View File

@@ -86,7 +86,7 @@ public class DeleteNotExistsV3ExecutorTest
{
scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/delete-not-exists/test-data1.sql");
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize";
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize system.delete_not_exists.temp_tst_tbl_1.skipToId";
int line = 1;
File scriptFile = Mockito.mock(File.class);
Properties properties = Mockito.mock(Properties.class);
@@ -101,8 +101,8 @@ public class DeleteNotExistsV3ExecutorTest
{
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_READ_ONLY)).thenReturn("true");
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_TIMEOUT_SECONDS)).thenReturn("-1");
DeleteNotExistsV3Executor DeleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
DeleteNotExistsV3Executor.execute();
DeleteNotExistsV3Executor deleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
deleteNotExistsV3Executor.execute();
List<String> res = jdbcTmpl.queryForList(select, String.class);
assertEquals(7, res.size());
@@ -117,8 +117,8 @@ public class DeleteNotExistsV3ExecutorTest
{
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_READ_ONLY)).thenReturn("false");
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_TIMEOUT_SECONDS)).thenReturn("-1");
DeleteNotExistsV3Executor DeleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
DeleteNotExistsV3Executor.execute();
DeleteNotExistsV3Executor deleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
deleteNotExistsV3Executor.execute();
List<String> res = jdbcTmpl.queryForList(select, String.class);
assertEquals(5, res.size());
@@ -137,7 +137,7 @@ public class DeleteNotExistsV3ExecutorTest
{
scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/delete-not-exists/test-data1.sql");
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize";
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize system.delete_not_exists.temp_tst_tbl_1.skipToId";
int line = 1;
File scriptFile = Mockito.mock(File.class);
Properties properties = Mockito.mock(Properties.class);
@@ -150,8 +150,8 @@ public class DeleteNotExistsV3ExecutorTest
{
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_DELETE_BATCH_SIZE)).thenReturn("1");
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_READ_ONLY)).thenReturn("false");
DeleteNotExistsV3Executor DeleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
DeleteNotExistsV3Executor.execute();
DeleteNotExistsV3Executor deleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
deleteNotExistsV3Executor.execute();
List<String> res = jdbcTmpl.queryForList(select, String.class);
assertEquals(5, res.size());
@@ -170,7 +170,7 @@ public class DeleteNotExistsV3ExecutorTest
{
scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/delete-not-exists/test-data1.sql");
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize";
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize system.delete_not_exists.temp_tst_tbl_1.skipToId";
int line = 1;
File scriptFile = Mockito.mock(File.class);
Properties properties = Mockito.mock(Properties.class);
@@ -198,4 +198,42 @@ public class DeleteNotExistsV3ExecutorTest
}
}
}
}
@Test
public void testSkip() throws Exception
{
scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/delete-not-exists/test-data1.sql");
String sql = "--DELETE_NOT_EXISTS_V3 temp_tst_tbl_1.id,temp_tst_tbl_2.tbl_2_id,temp_tst_tbl_3.tbl_3_id,temp_tst_tbl_4.tbl_4_id system.delete_not_exists.batchsize system.delete_not_exists.temp_tst_tbl_1.skipToId";
int line = 1;
File scriptFile = Mockito.mock(File.class);
Properties properties = Mockito.mock(Properties.class);
String select = "select id from temp_tst_tbl_1 order by id ASC";
try (Connection connection = dataSource.getConnection())
{
connection.setAutoCommit(true);
{
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_BATCH_SIZE)).thenReturn("2");
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_READ_ONLY)).thenReturn("false");
when(properties.getProperty(DeleteNotExistsV3Executor.PROPERTY_TIMEOUT_SECONDS)).thenReturn("-1");
when(properties.getProperty("system.delete_not_exists.temp_tst_tbl_1.skipToId")).thenReturn("6");
DeleteNotExistsV3Executor deleteNotExistsV3Executor = createDeleteNotExistsV3Executor(dialect, connection, sql, line, scriptFile, properties);
deleteNotExistsV3Executor.execute();
List<String> res = jdbcTmpl.queryForList(select, String.class);
assertEquals(7, res.size());
// We only process ids after the configured skipToId (6), so all ids < 6 must remain untouched
assertEquals("1", res.get(0));
assertEquals("2", res.get(1));
assertEquals("3", res.get(2));
assertEquals("4", res.get(3));
assertEquals("5", res.get(4));
assertEquals("10", res.get(5));
assertEquals("11", res.get(6));
}
}
}
}

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -28,6 +28,7 @@ package org.alfresco.repo.event2;
import java.util.Arrays;
import java.util.List;
import java.util.stream.IntStream;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.event.v1.model.ChildAssociationResource;
@@ -64,12 +65,11 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(),
resultRepoEvent.getType());
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(
parentNodeRef,
childNodeRef,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE, assocLocalName)));
retryingTransactionHelper.doInTransaction(() -> nodeService.addChild(
parentNodeRef,
childNodeRef,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE, assocLocalName)));
List<ChildAssociationRef> childAssociationRefs = retryingTransactionHelper.doInTransaction(() ->
nodeService.getChildAssocs(parentNodeRef));
@@ -77,10 +77,32 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(3);
checkNumOfEvents(4);
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getRepoEventWithoutWait(3);
// node event
final RepoEvent<EventData<NodeResource>> nodeRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), nodeRepoEvent.getType());
assertNotNull("Repo event ID is not available.", nodeRepoEvent.getId());
assertNotNull("Source is not available", nodeRepoEvent.getSource());
assertEquals("Repo event source is not available.",
"/" + descriptorService.getCurrentRepositoryDescriptor().getId(),
nodeRepoEvent.getSource().toString());
assertNotNull("Repo event creation time is not available.", nodeRepoEvent.getTime());
assertEquals("Invalid repo event datacontenttype", "application/json",
nodeRepoEvent.getDatacontenttype());
assertNotNull(nodeRepoEvent.getDataschema());
assertEquals(EventJSONSchema.NODE_UPDATED_V1.getSchema(), nodeRepoEvent.getDataschema());
final EventData<NodeResource> nodeResourceEventData = getEventData(nodeRepoEvent);
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNotNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final NodeResource nodeResource = getNodeResource(nodeRepoEvent);
final NodeResource nodeResourceBefore = getNodeResourceBefore(nodeRepoEvent);
assertNotSame("Secondary parents actual and earlier state should differ", nodeResource.getSecondaryParents(), nodeResourceBefore.getSecondaryParents());
// child association event
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getFilteredEvent(EventType.CHILD_ASSOC_CREATED, 0);
assertEquals("Wrong repo event type.", EventType.CHILD_ASSOC_CREATED.getType(), childAssocRepoEvent.getType());
assertNotNull("Repo event ID is not available.", childAssocRepoEvent.getId());
assertNotNull("Source is not available", childAssocRepoEvent.getSource());
@@ -93,16 +115,18 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(childAssocRepoEvent.getDataschema());
assertEquals(EventJSONSchema.CHILD_ASSOC_CREATED_V1.getSchema(), childAssocRepoEvent.getDataschema());
final EventData<ChildAssociationResource> nodeResourceEventData = getEventData(childAssocRepoEvent);
// EventData attributes
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final EventData<ChildAssociationResource> childAssocResourceEventData = getEventData(childAssocRepoEvent);
assertNotNull("Event data group ID is not available. ", childAssocResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", childAssocResourceEventData.getResourceBefore());
final ChildAssociationResource childAssociationResource = getChildAssocResource(childAssocRepoEvent);
assertEquals("Wrong parent", parentNodeRef.getId(), childAssociationResource.getParent().getId());
assertEquals("Wrong child", childNodeRef.getId(), childAssociationResource.getChild().getId());
assertEquals("Wrong assoc type", "cm:contains", childAssociationResource.getAssocType());
assertEquals("Wrong assoc name", "ce:" + assocLocalName, childAssociationResource.getAssocQName());
assertEquals("Node and child association events should have same eventGroupId", nodeResourceEventData.getEventGroupId(), childAssocResourceEventData.getEventGroupId());
assertTrue("Wrong node's secondary parents", nodeResource.getSecondaryParents().contains(childAssociationResource.getParent().getId()));
}
@Test
@@ -131,7 +155,7 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(3);
checkNumOfEvents(4);
retryingTransactionHelper.doInTransaction(() ->
nodeService.removeChildAssociation(childAssociationRef));
@@ -141,10 +165,32 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(0, childAssociationRefs.size());
checkNumOfEvents(4);
checkNumOfEvents(6);
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getRepoEventWithoutWait(4);
// node repo event
final RepoEvent<EventData<NodeResource>> nodeRepoEvent = getRepoEventWithoutWait(5);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), nodeRepoEvent.getType());
assertNotNull("Repo event ID is not available.", nodeRepoEvent.getId());
assertNotNull("Source is not available", nodeRepoEvent.getSource());
assertEquals("Repo event source is not available.",
"/" + descriptorService.getCurrentRepositoryDescriptor().getId(),
nodeRepoEvent.getSource().toString());
assertNotNull("Repo event creation time is not available.", nodeRepoEvent.getTime());
assertEquals("Invalid repo event datacontenttype", "application/json",
nodeRepoEvent.getDatacontenttype());
assertNotNull(nodeRepoEvent.getDataschema());
assertEquals(EventJSONSchema.NODE_UPDATED_V1.getSchema(), nodeRepoEvent.getDataschema());
final EventData<NodeResource> nodeResourceEventData = getEventData(nodeRepoEvent);
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNotNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final NodeResource nodeResource = getNodeResource(nodeRepoEvent);
final NodeResource nodeResourceBefore = getNodeResourceBefore(nodeRepoEvent);
assertNotSame("Secondary parents actual and earlier state should differ", nodeResource.getSecondaryParents(), nodeResourceBefore.getSecondaryParents());
// child association repo event
final RepoEvent<EventData<ChildAssociationResource>> childAssocRepoEvent = getFilteredEvent(EventType.CHILD_ASSOC_DELETED, 0);
assertEquals("Wrong repo event type.", EventType.CHILD_ASSOC_DELETED.getType(), childAssocRepoEvent.getType());
assertNotNull("Repo event ID is not available. ", childAssocRepoEvent.getId());
assertNotNull("Source is not available", childAssocRepoEvent.getSource());
@@ -156,15 +202,17 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(childAssocRepoEvent.getDataschema());
assertEquals(EventJSONSchema.CHILD_ASSOC_DELETED_V1.getSchema(), childAssocRepoEvent.getDataschema());
final EventData<ChildAssociationResource> nodeResourceEventData = getEventData(childAssocRepoEvent);
// EventData attributes
assertNotNull("Event data group ID is not available. ", nodeResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", nodeResourceEventData.getResourceBefore());
final EventData<ChildAssociationResource> childAssocResourceEventData = getEventData(childAssocRepoEvent);
assertNotNull("Event data group ID is not available. ", childAssocResourceEventData.getEventGroupId());
assertNull("resourceBefore property is not available", childAssocResourceEventData.getResourceBefore());
final ChildAssociationResource childAssociationResource = getChildAssocResource(childAssocRepoEvent);
assertEquals("Wrong parent", parentNodeRef.getId(), childAssociationResource.getParent().getId());
assertEquals("Wrong child", childNodeRef.getId(), childAssociationResource.getChild().getId());
assertEquals("Wrong assoc type", "cm:contains", childAssociationResource.getAssocType());
assertEquals("Node and child association events should have same eventGroupId", nodeResourceEventData.getEventGroupId(), childAssocResourceEventData.getEventGroupId());
assertTrue("Wrong node's secondary parents", nodeResourceBefore.getSecondaryParents().contains(childAssociationResource.getParent().getId()));
}
@Test
@@ -179,17 +227,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(
@@ -212,12 +253,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
// 3 assoc.child.Created events should be created
checkNumOfEvents(8);
// 1 node.Updated event should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 1, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -231,17 +275,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
List<NodeRef> parents = Arrays.asList(parent1NodeRef, parent2NodeRef, parent3NodeRef);
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef parent : parents)
@@ -268,10 +305,14 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(8);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(5);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
// All events in the transaction should have the same eventGroupId
String assocEventGroupID1 = getEventData(childAssocEvents.get(0)).getEventGroupId();
@@ -294,17 +335,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
for (NodeRef parent : parents)
{
@@ -330,10 +364,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
assertEquals(parent1NodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getParent().getId());
assertEquals(childNodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getChild().getId());
@@ -360,17 +399,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef child : children)
@@ -388,10 +420,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -406,17 +443,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
for (NodeRef child : children)
{
@@ -432,10 +462,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(7);
checkNumOfEvents(10);
// 3 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 3, nodeUpdateEvents.size());
// 3 assoc.child.Created events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_CREATED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
assertEquals(parentNodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getParent().getId());
assertEquals(child1NodeRef.getId(), getChildAssocResource(childAssocEvents.get(0)).getChild().getId());
@@ -462,17 +497,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -501,15 +529,19 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
checkNumOfEvents(10);
checkNumOfEvents(12);
// 2 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 2, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
public void testDeleteAssociationOneParentMultipleChildrenDifferentTransactions()
public void testDeleteAssociationMultipleParentsOneChildDifferentTransactions()
{
final NodeRef parent1NodeRef = createNode(ContentModel.TYPE_FOLDER);
final NodeRef parent2NodeRef = createNode(ContentModel.TYPE_FOLDER);
@@ -520,17 +552,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -557,7 +582,7 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
nodeService.removeChildAssociation(childAssociationRef));
}
checkNumOfEvents(10);
checkNumOfEvents(14);
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
@@ -588,17 +613,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() -> {
for (NodeRef child : children)
@@ -619,11 +637,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
deleteNode(parentNodeRef);
checkNumOfEvents(11);
checkNumOfEvents(17);
// 6 node.Updated events should be created
List<RepoEvent<EventData<NodeResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 6, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -638,17 +660,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(4);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(3);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(4);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2, 3, 4).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
nodeService.addChild(parents, childNodeRef, ContentModel.ASSOC_CONTAINS,
@@ -670,11 +685,15 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
deleteNode(childNodeRef);
checkNumOfEvents(11);
checkNumOfEvents(12);
// 2 node.Updated events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> nodeUpdateEvents = getFilteredEvents(EventType.NODE_UPDATED);
assertEquals("Wrong node update events number", 2, nodeUpdateEvents.size());
// 3 assoc.child.Deleted events should be created
List<RepoEvent<EventData<ChildAssociationResource>>> childAssocEvents = getFilteredEvents(EventType.CHILD_ASSOC_DELETED);
assertEquals("Wrong association events number",3, childAssocEvents.size());
assertEquals("Wrong association events number", 3, childAssocEvents.size());
}
@Test
@@ -685,11 +704,10 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(2);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEventWithoutWait(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
IntStream.of(1, 2).forEach(i -> {
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEventWithoutWait(i);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
});
retryingTransactionHelper.doInTransaction(() ->
{
@@ -708,13 +726,14 @@ public class ChildAssociationRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(1, childAssociationRefs.size());
assertFalse(childAssociationRefs.get(0).isPrimary());
checkNumOfEvents(4);
checkNumOfEvents(5);
// Check the node events occur before the child association event
List<RepoEvent<?>> repoEvents = getRepoEventsContainer().getEvents();
assertEquals("org.alfresco.event.node.Created", repoEvents.get(0).getType());
assertEquals("org.alfresco.event.node.Created", repoEvents.get(1).getType());
assertEquals("org.alfresco.event.node.Updated", repoEvents.get(2).getType());
assertEquals("org.alfresco.event.assoc.child.Created", repoEvents.get(3).getType());
assertEquals(EventType.NODE_CREATED.getType(), repoEvents.get(0).getType());
assertEquals(EventType.NODE_CREATED.getType(), repoEvents.get(1).getType());
assertEquals(EventType.NODE_UPDATED.getType(), repoEvents.get(2).getType());
assertEquals(EventType.NODE_UPDATED.getType(), repoEvents.get(3).getType());
assertEquals(EventType.CHILD_ASSOC_CREATED.getType(), repoEvents.get(4).getType());
}
}
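
A recurring change in the ChildAssociationRepoEventIT hunks above collapses four copy-pasted getRepoEventWithoutWait assertions into a single IntStream loop. The sketch below is a minimal, self-contained illustration of that pattern, assuming JUnit 4 on the classpath and using a plain in-memory list of event type strings in place of the test's repository helpers.

import static org.junit.Assert.assertEquals;

import java.util.List;
import java.util.stream.IntStream;

import org.junit.Test;

public class RepeatedAssertionPatternSketchTest
{
    // Hypothetical stand-in for the events returned by getRepoEventWithoutWait(i).
    private final List<String> eventTypes = List.of(
            "org.alfresco.event.node.Created",
            "org.alfresco.event.node.Created",
            "org.alfresco.event.node.Created",
            "org.alfresco.event.node.Created");

    @Test
    public void firstFourEventsAreNodeCreated()
    {
        // One loop replaces four identical assert blocks; indexes stay 1-based
        // to mirror getRepoEventWithoutWait(i) in the integration test.
        IntStream.of(1, 2, 3, 4).forEach(i ->
                assertEquals("Wrong repo event type.",
                        "org.alfresco.event.node.Created", eventTypes.get(i - 1)));
    }
}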


@@ -27,24 +27,38 @@ package org.alfresco.repo.event2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.when;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.event.v1.model.EventType;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.junit.Before;
import org.junit.Test;
public class EventConsolidatorUnitTest
{
private NodeResourceHelper nodeResourceHelper = mock(NodeResourceHelper.class);
private final NodeResourceHelper nodeResourceHelper = mock(NodeResourceHelper.class);
private NodeEventConsolidator eventConsolidator;
@Before
public void setUp() throws Exception
{
eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
}
@Test
public void testGetMappedAspectsBeforeRemovedAndAddedEmpty()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
Set<String> currentAspects = new HashSet<>();
currentAspects.add("cm:geographic");
currentAspects.add("cm:auditable");
@@ -57,7 +71,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemoved()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -79,7 +92,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -102,7 +114,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAddedAndRemoved()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
Set<String> currentAspects = new HashSet<>();
@@ -125,7 +136,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemovedAndAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -150,8 +160,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectAddedTwiceRemovedOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -178,8 +186,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_AspectRemovedTwiceAddedOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -206,7 +212,6 @@ public class EventConsolidatorUnitTest
@Test
public void testGetMappedAspectsBefore_FilteredAspectAdded()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASPECT_COPIEDFROM);
Set<String> currentAspects = new HashSet<>();
@@ -227,7 +232,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
assertEquals(1, eventConsolidator.getAspectsAdded().size());
@@ -238,7 +242,6 @@ public class EventConsolidatorUnitTest
@Test
public void testRemoveAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
assertEquals(0, eventConsolidator.getAspectsAdded().size());
@@ -249,7 +252,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectRemoveAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -260,7 +262,6 @@ public class EventConsolidatorUnitTest
@Test
public void testRemoveAspectAddAspect()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -271,7 +272,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectTwiceRemoveAspectOnce()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
@@ -284,7 +284,6 @@ public class EventConsolidatorUnitTest
@Test
public void testAddAspectOnceRemoveAspectTwice()
{
NodeEventConsolidator eventConsolidator = new NodeEventConsolidator(nodeResourceHelper);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.addAspect(ContentModel.ASSOC_CONTAINS);
eventConsolidator.removeAspect(ContentModel.ASSOC_CONTAINS);
@@ -293,4 +292,83 @@ public class EventConsolidatorUnitTest
assertEquals(1, eventConsolidator.getAspectsRemoved().size());
assertTrue(eventConsolidator.getAspectsRemoved().contains(ContentModel.ASSOC_CONTAINS));
}
@Test
public void testOnMoveNodeWithPrimaryParent()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
given(newAssociationMock.isPrimary()).willReturn(true);
given(oldAssociationMock.getParentRef()).willReturn(parentRefMock);
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().getChildRef();
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(nodeResourceHelper).should().getPrimaryHierarchy(parentRefMock, true);
assertTrue("Node event consolidator should contain event type: UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
}
@Test
public void testOnMoveNodeAfterSecondaryParentAdded()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
List<String> secondaryParentsMock = mock(List.class);
given(newAssociationMock.isPrimary()).willReturn(false);
given(newAssociationMock.getChildRef()).willReturn(nodeRefMock);
given(newAssociationMock.getParentRef()).willReturn(parentRefMock);
given(parentRefMock.getId()).willReturn("parent-id");
given(nodeResourceHelper.getSecondaryParents(any(NodeRef.class))).willReturn(secondaryParentsMock);
// when
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).should(times(2)).getChildRef();
then(newAssociationMock).should(times(2)).getParentRef();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(oldAssociationMock).shouldHaveNoInteractions();
then(nodeResourceHelper).should().getSecondaryParents(nodeRefMock);
then(secondaryParentsMock).should().remove("parent-id");
then(secondaryParentsMock).shouldHaveNoMoreInteractions();
assertTrue("Node event consolidator should contain event type: UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
assertEquals(secondaryParentsMock, eventConsolidator.getSecondaryParentsBefore());
}
@Test
public void testOnMoveNodeBeforeSecondaryParentRemoved()
{
ChildAssociationRef oldAssociationMock = mock(ChildAssociationRef.class);
ChildAssociationRef newAssociationMock = mock(ChildAssociationRef.class);
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
List<String> secondaryParentsMock = mock(List.class);
given(newAssociationMock.isPrimary()).willReturn(false);
given(newAssociationMock.getChildRef()).willReturn(nodeRefMock);
given(oldAssociationMock.getParentRef()).willReturn(parentRefMock);
given(parentRefMock.getId()).willReturn("parent-id");
given(nodeResourceHelper.getSecondaryParents(any(NodeRef.class))).willReturn(secondaryParentsMock);
// when
eventConsolidator.onMoveNode(oldAssociationMock, newAssociationMock);
then(newAssociationMock).should().isPrimary();
then(newAssociationMock).should(times(2)).getChildRef();
then(newAssociationMock).should().getParentRef();
then(newAssociationMock).shouldHaveNoMoreInteractions();
then(oldAssociationMock).should(times(3)).getParentRef();
then(oldAssociationMock).shouldHaveNoMoreInteractions();
then(nodeResourceHelper).should().getSecondaryParents(nodeRefMock);
then(secondaryParentsMock).should().contains("parent-id");
then(secondaryParentsMock).should().add("parent-id");
then(secondaryParentsMock).shouldHaveNoMoreInteractions();
assertTrue("Node event consolidator should contain event type: NODE_UPDATED", eventConsolidator.getEventTypes().contains(EventType.NODE_UPDATED));
assertEquals(secondaryParentsMock, eventConsolidator.getSecondaryParentsBefore());
}
}
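
The three new onMoveNode tests above pin down how the consolidator rebuilds the list of secondary parents as it looked before a move: a parent that was just added is taken out of the current list, and a parent that was just removed is put back if it is missing. The sketch below is a simplified illustration of that bookkeeping only, using plain strings instead of NodeRef and ChildAssociationRef; it is not the repository's NodeEventConsolidator.

import java.util.ArrayList;
import java.util.List;

public class SecondaryParentsBeforeSketch
{
    private List<String> secondaryParentsBefore;

    // Reconstruct the "before the move" view from the current secondary parents.
    public void onSecondaryMove(List<String> currentSecondaryParents,
            String removedParentId, String addedParentId)
    {
        List<String> before = new ArrayList<>(currentSecondaryParents);

        // A parent that was just added was not a secondary parent before the move.
        if (addedParentId != null)
        {
            before.remove(addedParentId);
        }

        // A parent that was just removed was still a secondary parent before the move.
        if (removedParentId != null && !before.contains(removedParentId))
        {
            before.add(removedParentId);
        }
        secondaryParentsBefore = before;
    }

    public List<String> getSecondaryParentsBefore()
    {
        return secondaryParentsBefore;
    }
}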


@@ -119,6 +119,9 @@ public class EventFilterUnitTest
assertTrue("System properties are excluded by default.",
propertyFilter.isExcluded(ContentModel.PROP_NODE_DBID));
assertFalse("Property cascadeTx is not excluded", propertyFilter.isExcluded(ContentModel.PROP_CASCADE_TX));
assertFalse("Property cascadeCRC is not excluded", propertyFilter.isExcluded(ContentModel.PROP_CASCADE_CRC));
assertFalse(propertyFilter.isExcluded(ContentModel.PROP_TITLE));
}
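
The added assertions verify that cascadeTx and cascadeCRC are no longer excluded while system properties such as sys:node-dbid still are. As a generic illustration of a name-based exclusion filter of this kind (hypothetical class and property names, not the repository's event filter implementation):

import java.util.Set;

public class PropertyExclusionFilterSketch
{
    private final Set<String> excludedNames;

    public PropertyExclusionFilterSketch(Set<String> excludedNames)
    {
        this.excludedNames = Set.copyOf(excludedNames);
    }

    // A property is excluded when its prefixed name is in the configured set.
    public boolean isExcluded(String prefixedName)
    {
        return excludedNames.contains(prefixedName);
    }

    public static void main(String[] args)
    {
        PropertyExclusionFilterSketch filter =
                new PropertyExclusionFilterSketch(Set.of("sys:node-dbid"));
        System.out.println(filter.isExcluded("sys:node-dbid")); // true
        System.out.println(filter.isExcluded("cm:title"));      // false
    }
}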


@@ -28,14 +28,43 @@ package org.alfresco.repo.event2;
import static org.alfresco.repo.event2.NodeResourceHelper.getLocalizedPropertiesBefore;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.mock;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class NodeResourceHelperUnitTest
{
@Mock
private NodeService nodeServiceMock;
@InjectMocks
private NodeResourceHelper nodeResourceHelper;
@Before
public void setUp() throws Exception
{
MockitoAnnotations.openMocks(this);
}
@Test
public void shouldExtractOnlyRelevantPropertiesForBeforeNode()
{
@@ -111,4 +140,39 @@ public class NodeResourceHelperUnitTest
return this;
}
}
@Test
public void testGetSecondaryParents()
{
NodeRef nodeRefMock = mock(NodeRef.class);
NodeRef parentRefMock = mock(NodeRef.class);
ChildAssociationRef secondaryParentMock = mock(ChildAssociationRef.class);
given(nodeServiceMock.getParentAssocs(any(NodeRef.class))).willReturn(List.of(secondaryParentMock));
given(secondaryParentMock.isPrimary()).willReturn(false);
given(secondaryParentMock.getParentRef()).willReturn(parentRefMock);
// when
List<String> secondaryParents = nodeResourceHelper.getSecondaryParents(nodeRefMock);
then(nodeServiceMock).should().getParentAssocs(nodeRefMock);
then(nodeServiceMock).shouldHaveNoMoreInteractions();
then(secondaryParentMock).should().isPrimary();
then(secondaryParentMock).should().getParentRef();
then(secondaryParentMock).shouldHaveNoMoreInteractions();
then(parentRefMock).should().getId();
then(parentRefMock).shouldHaveNoMoreInteractions();
assertNotNull(secondaryParents);
}
@Test
public void testGetNoneSecondaryParents()
{
NodeRef nodeRefMock = mock(NodeRef.class);
// when
List<String> secondaryParents = nodeResourceHelper.getSecondaryParents(nodeRefMock);
assertNotNull(secondaryParents);
assertTrue(secondaryParents.isEmpty());
}
}
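
The two new tests above exercise a helper that returns the IDs of a node's non-primary parents: fetch the parent associations, drop the primary one, and map the rest to parent node IDs. A hedged sketch of such a lookup against the public NodeService API (simplified; the actual NodeResourceHelper method may differ in details):

import java.util.List;

import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class SecondaryParentsLookupSketch
{
    private final NodeService nodeService;

    public SecondaryParentsLookupSketch(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    // Fetch all parent associations, keep the non-primary ones, return parent node IDs.
    public List<String> getSecondaryParents(NodeRef nodeRef)
    {
        return nodeService.getParentAssocs(nodeRef).stream()
                .filter(assoc -> !assoc.isPrimary())
                .map(ChildAssociationRef::getParentRef)
                .map(NodeRef::getId)
                .toList();
    }
}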


@@ -87,7 +87,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(2);
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
resultRepoEvent.getType());
@@ -227,7 +227,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(2);
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
@@ -625,7 +625,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
// Create active model
CustomModelDefinition modelDefinition =
retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
assertNotNull(modelDefinition);
assertEquals(modelName, modelDefinition.getName().getLocalName());
@@ -635,8 +635,11 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
Collection<TypeDefinition> types = modelDefinition.getTypeDefinitions();
assertEquals(1, types.size());
// we should have only 2 events, node.Created and node.Updated
checkNumOfEvents(2);
// node.Created event should be generated for the model
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getFilteredEvent(EventType.NODE_CREATED, 0);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
NodeResource nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Incorrect node type was found", "cm:dictionaryModel", nodeResource.getNodeType());
@@ -647,9 +650,9 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(ContentModel.TYPE_CONTENT, nodeService.getType(nodeRef));
// node.Created event should be generated
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
resultRepoEvent = getRepoEvent(3);
nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
assertEquals("cm:content node type was not found", "cm:content", nodeResource.getNodeType());
QName typeQName = QName.createQName("{" + namespacePair.getFirst()+ "}" + typeName);
@@ -661,15 +664,15 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
return null;
});
// we should have 3 events, node.Created for the model, node.Created for the node and node.Updated
checkNumOfEvents(3);
// we should have 4 events, node.Created for the model, node.Updated for the parent, node.Created for the node and node.Updated
checkNumOfEvents(4);
resultRepoEvent = getRepoEvent(3);
resultRepoEvent = getRepoEvent(4);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), resultRepoEvent.getType());
nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Incorrect node type was found", namespacePair.getSecond() + QName.NAMESPACE_PREFIX + typeName, nodeResource.getNodeType());
NodeResource resourceBefore = getNodeResourceBefore(3);
NodeResource resourceBefore = getNodeResourceBefore(4);
assertEquals("Incorrect node type was found", "cm:content", resourceBefore.getNodeType());
assertNull(resourceBefore.getId());
assertNull(resourceBefore.getContent());
@@ -788,7 +791,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
});
checkNumOfEvents(4);
NodeResource resourceBefore = getNodeResourceBefore(4);
NodeResource resource = getNodeResource(4);
@@ -808,7 +811,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertNull(resourceBefore.getModifiedByUser());
assertNull(resourceBefore.getCreatedAt());
assertNull(resourceBefore.getCreatedByUser());
assertNull(resourceBefore.getProperties());
assertNotNull(resourceBefore.getProperties());
assertNull(resourceBefore.getAspectNames());
assertNotNull(resourceBefore.getPrimaryHierarchy());
assertNull("Content should have been null.", resource.getContent());
@@ -818,7 +821,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertNotNull(resource.getModifiedByUser());
assertNotNull(resource.getAspectNames());
assertNull(resource.getContent());
assertTrue(resource.getProperties().isEmpty());
assertFalse(resource.getProperties().isEmpty());
}
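
The flipped assertions above reflect that the resourceBefore view of this node.Updated event now carries the changed properties instead of null. As a rough illustration of that "only what changed" idea (hypothetical helper, not the repository's NodeResourceHelper logic):

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class BeforePropertiesSketch
{
    // Keep only the entries whose value differs between the old and new snapshots.
    public static Map<String, Object> propertiesBefore(Map<String, Object> oldProps,
            Map<String, Object> newProps)
    {
        Map<String, Object> changed = new HashMap<>();
        oldProps.forEach((name, oldValue) -> {
            if (!Objects.equals(oldValue, newProps.get(name)))
            {
                changed.put(name, oldValue);
            }
        });
        return changed;
    }
}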
@Test
@@ -1020,7 +1023,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
NodeResource resource = getNodeResource(1);
final Set<String> originalAspects = resource.getAspectNames();
assertNotNull(originalAspects);
retryingTransactionHelper.doInTransaction(() -> {
// Add cm:geographic aspect with default value
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, null);


@@ -31,15 +31,20 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Map;
import java.util.UUID;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacadeFactoryBean.JwtDecoderProvider;
import org.alfresco.repo.security.authentication.identityservice.IdentityServiceFacadeFactoryBean.JwtIssuerValidator;
import org.junit.Test;
import org.springframework.security.oauth2.client.registration.ClientRegistration.ProviderDetails;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtDecoder;
public class IdentityServiceFacadeFactoryBeanTest
{
private static final String EXPECTED_ISSUER = "expected-issuer";
@Test
public void shouldCreateJwtDecoderWithoutIDSWhenPublicKeyIsProvided()
{
@@ -62,4 +67,53 @@ public class IdentityServiceFacadeFactoryBeanTest
.containsEntry(USERNAME_CLAIM, "piotrek");
}
@Test
public void shouldFailWithNotMatchingIssuerURIs()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer("different-issuer"));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isTrue();
assertThat(validationResult.getErrors()).hasSize(1);
final OAuth2Error error = validationResult.getErrors().iterator().next();
assertThat(error).isNotNull();
assertThat(error.getDescription()).contains(EXPECTED_ISSUER, "different-issuer");
}
@Test
public void shouldFailWithNullIssuerURI()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer(null));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isTrue();
assertThat(validationResult.getErrors()).hasSize(1);
final OAuth2Error error = validationResult.getErrors().iterator().next();
assertThat(error).isNotNull();
assertThat(error.getDescription()).contains(EXPECTED_ISSUER, "null");
}
@Test
public void shouldSucceedWithMatchingIssuerURI()
{
final JwtIssuerValidator issuerValidator = new JwtIssuerValidator(EXPECTED_ISSUER);
final OAuth2TokenValidatorResult validationResult = issuerValidator.validate(tokenWithIssuer(EXPECTED_ISSUER));
assertThat(validationResult).isNotNull();
assertThat(validationResult.hasErrors()).isFalse();
assertThat(validationResult.getErrors()).isEmpty();
}
private Jwt tokenWithIssuer(String issuer)
{
return Jwt.withTokenValue(UUID.randomUUID().toString())
.issuer(issuer)
.header("JUST", "FOR TESTING")
.build();
}
}
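
The new tests above drive a JwtIssuerValidator that accepts a token only when its iss claim matches the expected issuer and otherwise reports an error naming both values. A simplified sketch of such a validator on top of Spring Security's token-validation API (not the inner class under test; its error code and wording may differ):

import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;

public class IssuerValidatorSketch implements OAuth2TokenValidator<Jwt>
{
    private final String expectedIssuer;

    public IssuerValidatorSketch(String expectedIssuer)
    {
        this.expectedIssuer = expectedIssuer;
    }

    // Succeed when the token's issuer equals the configured one, otherwise fail
    // with a description that names both the expected and the actual issuer.
    @Override
    public OAuth2TokenValidatorResult validate(Jwt token)
    {
        final String actualIssuer = token.getClaimAsString(JwtClaimNames.ISS);
        if (expectedIssuer.equals(actualIssuer))
        {
            return OAuth2TokenValidatorResult.success();
        }
        return OAuth2TokenValidatorResult.failure(new OAuth2Error(
                "invalid_issuer",
                "Expected issuer `" + expectedIssuer + "` but token contains `" + actualIssuer + "`",
                null));
    }
}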


@@ -9,7 +9,7 @@ services:
ports:
- "8090:8090"
postgres:
image: postgres:14.4
image: postgres:15.4
profiles: ["default", "with-transform-core-aio", "postgres", "with-mtls-transform-core-aio"]
environment:
- POSTGRES_PASSWORD=alfresco
@@ -56,4 +56,4 @@ services:
CLIENT_SSL_TRUST_STORE: "file:/tengineAIO.truststore"
CLIENT_SSL_TRUST_STORE_PASSWORD: "password"
CLIENT_SSL_TRUST_STORE_TYPE: "JCEKS"
CLIENT_SSL_TRUST_STORE_TYPE: "JCEKS"