Compare commits

...

132 Commits
15.11 ... 17.9

Author SHA1 Message Date
Travis CI User
9503e790df [maven-release-plugin][skip ci] prepare release 17.9 2022-06-02 05:41:42 +00:00
kavitshah-gl
c0947cef35 Feature/apps 2755 UI conversion (#1127)
* RM-2366 - UI to Rest Conversion

* RM-2366 - UI to Rest Conversion Skipping Test Cases

* RM-2366 - Correcting RDAdmin UserName

* RM-2366 - Adding License Header

* Adding Test case for FileUnfiledRecordsTests

* CreateElectronicRecordsTests

* Added TestClass for InplaceRecordSearchTests

* Added TestClass for SearchRecordsTests and Utility Changes for InplaceRecordSearchTests

* Revert "RM-2366 - UI to Rest Conversion Skipping Test Cases"

This reverts commit 37828788de.

* Refactoring code cleanup

* AddRelationshipToHoldRecord added in comunity repo

* AddRelationshipToHoldRecord added in branch

* Adding SearchRecordsTests for Community AGS Records

* Adding License Header

* Adding TestCases for AddRelationshipTests

* Adding TestCases for AddRelationshipTests

* Analysing Test Failure in Travis

* Analysing Test Failure in Travis

* Analysing Test Failure in Travis

* Revert "Analysing Test Failure in Travis"

This reverts commit b02df3edcd.

* Analysing Test Failure in Travis

* [skip repo][skip db][skip tas]

* [skip repo][skip db][skip tas]

* [skip repo][skip db][skip tas]

* [skip repo][skip db][skip tas]

* [skip repo][skip db][skip tas]

* Fix all review comments

Co-authored-by: sbisht <shishuraj.bisht@globallogic.com>
2022-06-02 09:34:21 +05:30
Travis CI User
8deeed78ef [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-31 13:45:27 +00:00
Travis CI User
39ffc0a00c [maven-release-plugin][skip ci] prepare release 17.8 2022-05-31 13:45:25 +00:00
Marcin Strankowski
0885fb157a ACS-3013: Uppercasing services used to allow security proxy (#1141)
Not much happens in the class: it checks the node and creates a version.
Added beans that include security interceptors, but VersionService has no restrictions, and for NodeService there are only a few checks, so it likely never was a security issue to begin with.
2022-05-31 15:04:43 +02:00
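For context on the uppercasing above: in Alfresco the capitalised bean ids (e.g. "NodeService", "VersionService") are the public proxies wrapped by the security interceptors, while the lowercase ids are the unsecured internal beans. A minimal Java sketch of looking up the secured proxy, assuming a Spring ApplicationContext for the repository is available (the class and method names here are illustrative, not from the commit):

import org.alfresco.service.cmr.repository.NodeService;
import org.springframework.context.ApplicationContext;

public class SecuredBeanLookupSketch
{
    public static NodeService lookupSecuredNodeService(ApplicationContext ctx)
    {
        // "NodeService" (uppercase) resolves to the proxy guarded by the security
        // interceptors; "nodeService" (lowercase) would bypass those checks and is
        // intended only for trusted internal code.
        return ctx.getBean("NodeService", NodeService.class);
    }
}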
Travis CI User
0f5dab60ea [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-30 12:14:00 +00:00
Travis CI User
aa70ee42d6 [maven-release-plugin][skip ci] prepare release 17.7 2022-05-30 12:13:57 +00:00
Kacper Magdziarz
b7bd6aa215 [ACS-3006] Revert changes made to implement a validation logic (#1139)
* Revert "ACS-2744 Private action validation logic (#1051)"

This reverts commit 64998b5c53.

* Revert "[ACS-2747] Prevent private action execution from the V1 HTTP API. Added end-to-end test method. (#1108)"

This reverts commit 8edfd4bdce.
2022-05-30 13:18:44 +02:00
Travis CI User
e1b6203cdc [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-29 00:09:49 +00:00
Travis CI User
69a3bda874 [maven-release-plugin][skip ci] prepare release 17.6 2022-05-29 00:09:47 +00:00
Alfresco CI User
c8037d7b21 [force] Force release for 2022-05-29. 2022-05-29 00:03:38 +00:00
Travis CI User
e6382d8f6c [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-25 12:15:11 +00:00
Travis CI User
6ddd2f0539 [maven-release-plugin][skip ci] prepare release 17.5 2022-05-25 12:15:08 +00:00
Kacper Magdziarz
93718dc296 ACS-2888 - add usage of bean with security interceptor. (#1135) 2022-05-25 13:25:48 +02:00
Travis CI User
c5b42dc0f2 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-24 11:40:08 +00:00
Travis CI User
3f65bc6057 [maven-release-plugin][skip ci] prepare release 17.4 2022-05-24 11:40:05 +00:00
Kristian Dimitrov
e3d56ad557 ACS-2827: Add Search Tests To CMIS-TAS (#1125)
* ACS-2827: Initial commit

* ACS-2827: Fix build

* ACS-2827: Rename test files

* ACS-2827: Increase retry time
2022-05-24 11:51:28 +01:00
Travis CI User
5a92d7f013 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-24 09:24:57 +00:00
Travis CI User
1de387abc0 [maven-release-plugin][skip ci] prepare release 17.3 2022-05-24 09:24:54 +00:00
Damian Ujma
83e06193d1 ACS-2800: Replace throwing NPE to CmisInvalidArgumentException (#1130)
* ACS-2800 Throw CmisInvalidArgumentException if type cannot be found

* ACS-2800 Reformat code
2022-05-24 10:43:09 +02:00
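As a hedged illustration of the ACS-2800 change above - surfacing an unknown CMIS type as a CmisInvalidArgumentException rather than letting a NullPointerException escape - a minimal Java sketch; the lookup method and message text are assumptions, not the actual repository code:

import org.apache.chemistry.opencmis.commons.definitions.TypeDefinition;
import org.apache.chemistry.opencmis.commons.exceptions.CmisInvalidArgumentException;

public class CmisTypeLookupSketch
{
    // Hypothetical lookup that returns null for an unknown type id.
    static TypeDefinition findType(String typeId)
    {
        return null; // stand-in for the real type registry lookup
    }

    public static TypeDefinition getTypeOrThrow(String typeId)
    {
        TypeDefinition type = findType(typeId);
        if (type == null)
        {
            // Previously the null would propagate and fail later as an NPE;
            // CmisInvalidArgumentException maps to a well-defined CMIS client error.
            throw new CmisInvalidArgumentException("Type '" + typeId + "' is unknown");
        }
        return type;
    }
}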
Travis CI User
02a2d88712 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-22 00:09:24 +00:00
Travis CI User
589b99e17d [maven-release-plugin][skip ci] prepare release 17.2 2022-05-22 00:09:21 +00:00
Alfresco CI User
74d8b50ac0 [force] Force release for 2022-05-22. 2022-05-22 00:03:24 +00:00
alandavis
906b6d4e5d amp.min.version needs to be 7.0.0 not 7.1.0 as was the case for 23.1 (which starts at 1 rather than 0)
[skip ci] as it is okay for now
2022-05-19 18:41:15 +01:00
Travis CI User
5f90fd945f [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-19 15:41:42 +00:00
Travis CI User
3670437c2e [maven-release-plugin][skip ci] prepare release 17.1 2022-05-19 15:41:37 +00:00
alandavis
a128916bb4 Use 17.1-SNAPSHOT for ACS 7.3.0
[skip tests] as we have just run them
[no downstream] as we want to skip tests in enterprise repo too
2022-05-19 16:35:40 +01:00
Travis CI User
ea7de7e1e7 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-19 12:02:38 +00:00
Travis CI User
11acd44283 [maven-release-plugin][skip ci] prepare release 16.1 2022-05-19 12:02:35 +00:00
alandavis
af41e4b54c Change Master branch to 7.3.0 2022-05-19 12:09:27 +01:00
Travis CI User
cf8f27721a [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-17 21:00:09 +00:00
Travis CI User
f04b1d2e33 [maven-release-plugin][skip ci] prepare release 23.1.0.30 2022-05-17 21:00:06 +00:00
montgolfiere
1f9e861e03 ACS-2925: Elasticsearch - refactor DB switching to not use Solr-specific properties / debug log (#1089)
* ACS-2925: Elasticsearch - refactor DB switching to not use Solr-specific properties / debug log

- as per TP suggestion, thanks

- new generic query.* props

  - query.cmis.queryConsistency
  - query.fts.queryConsistency
  - query.hybrid.enabled

- deprecate current solr.query.* props (eg. upgrade)

  - solr.query.cmis.queryConsistency
  - solr.query.fts.queryConsistency
  - solr.query.hybrid.enabled


- another attempt using two inject properties (deprecated + new). See option C1 in ACS-2925

- also unrelated lic header change (LocalPipelineTransform.java) to fix build warning
2022-05-17 21:22:00 +01:00
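A hedged Java sketch of the "two injected properties (deprecated + new)" idea mentioned above (option C1): prefer the new generic query.* property and fall back to the deprecated solr.query.* one. The property access below uses plain java.util.Properties for illustration, not the actual subsystem wiring, and the default value is an assumption:

import java.util.Properties;

public class QueryConsistencySketch
{
    public static String resolveFtsQueryConsistency(Properties props)
    {
        // New generic property takes precedence ...
        String value = props.getProperty("query.fts.queryConsistency");
        if (value == null || value.isEmpty())
        {
            // ... otherwise fall back to the deprecated Solr-specific property.
            value = props.getProperty("solr.query.fts.queryConsistency", "TRANSACTIONAL_IF_POSSIBLE");
        }
        return value;
    }
}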
Marcin Strankowski
f5c1e26a9b ACS-2850: fix for intermittent failure on expiryLock test (#1083)
* ACS-2850: fix for intermittent failure on expiryLock test

First issue was int rounding of '(expirationDate - new Date())/1000', which for really close values of the timeout set and passed through lockInfo could be rounded down to 0. Replaced by a rounding-up formula.

Second issue was treating timeout == 0 as timeout.infinite. A WebDAV infinite timeout has its own marker, "-1"; in the case of 0, the lock was automatically turned into an infinite lock, while it should be an exception instead, as creating a lock with a 0s timeout is a programming error - the lock would immediately be expired upon creation (perhaps even with a date in the past, seeing as the expiryDate is filled).
The changes made clarify the intent behind calling the method instead of obfuscating it by passing the int value through date operations.
2022-05-17 17:10:32 +02:00
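A minimal Java sketch of the rounding issue described above: integer division of a small positive millisecond difference by 1000 truncates to 0 seconds, whereas a rounding-up formula keeps any still-valid lock at a timeout of at least 1 second (the method names are illustrative):

import java.util.Date;

public class LockTimeoutRoundingSketch
{
    // Truncating form: can report 0 for a lock that is still (just) valid.
    static long remainingSecondsTruncated(Date expirationDate)
    {
        return (expirationDate.getTime() - new Date().getTime()) / 1000;
    }

    // Rounding-up form: any positive remaining time maps to at least 1 second.
    static long remainingSecondsRoundedUp(Date expirationDate)
    {
        long remainingMillis = expirationDate.getTime() - new Date().getTime();
        return (remainingMillis + 999) / 1000;
    }
}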
Travis CI User
cd8b3594aa [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-17 14:03:53 +00:00
Travis CI User
57e3c09981 [maven-release-plugin][skip ci] prepare release 23.1.0.29 2022-05-17 14:03:51 +00:00
mikolajbrzezinski
1ccb8a2164 Mnt 22905/case sensitive query fix (#1107)
* useCQ = true

* useCQ back to original

* useCQ = true

* Copyright Update

* useCQ restored, Javascrpit changed

* Javascript changes to filter

* PR comments requested change

* Revert "PR comments requested change"

This reverts commit 0673b6c3ff.

* Revert "useCQ restored, Javascrpit changed"

This reverts commit 00b79b5aca.

* Revert "Copyright Update"

This reverts commit 76d1f1c005.

* Revert "useCQ = true"

This reverts commit 215ad952f5.

* Revert "useCQ back to original"

This reverts commit deb5e82218.

* Revert "useCQ = true"

This reverts commit 115910ffc1.

* test change

* Initial changes

* Further changes

* Space deleted

* jobtitle search

* Restore check sorting and mock

* Avoid null [hint:useCQ]

* Wrong sign

* Fix

* Clean up

* Initial changes

* Rename Method
2022-05-17 15:17:56 +02:00
Travis CI User
26c412bd80 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-15 00:10:01 +00:00
Travis CI User
e4552968c6 [maven-release-plugin][skip ci] prepare release 23.1.0.28 2022-05-15 00:09:59 +00:00
Alfresco CI User
2fa31de761 [force] Force release for 2022-05-15. 2022-05-15 00:03:21 +00:00
alandavis
04ae83c74c Revert "ACS-2864 Use maven props in AGS test version.properties so we don't have to update the value"
This reverts commit b36e21ad04.
2022-05-09 17:13:50 +01:00
alandavis
b36e21ad04 ACS-2864 Use maven props in AGS test version.properties so we don't have to update the value
(cherry picked from commit 4a4bb2de02)
2022-05-09 16:22:37 +01:00
Travis CI User
9a615b50ea [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-09 11:58:37 +00:00
Travis CI User
169e5efca2 [maven-release-plugin][skip ci] prepare release 23.1.0.27 2022-05-09 11:58:35 +00:00
kcichonczyk
8edfd4bdce [ACS-2747] Prevent private action execution from the V1 HTTP API. Added end-to-end test method. (#1108) 2022-05-09 13:14:10 +02:00
Travis CI User
810cd9f067 [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-06 04:36:18 +00:00
Travis CI User
63f5bc534b [maven-release-plugin][skip ci] prepare release 23.1.0.26 2022-05-06 04:36:15 +00:00
Sbisht19
ebb6e333d3 Origin/feature/apps 1164 alfresco logs (#1102)
* Added a test group for the alfresco logs test cases so they can be skipped in the single pipeline

* Added import statement for TestGroup

* Added import statement for TestGroup

Co-authored-by: Suneet Gupta <suneet.gupta@hyland.com>
2022-05-05 20:57:57 +05:30
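A hedged TestNG sketch of the kind of change described above - tagging test methods with a group so the single pipeline can exclude them; the group name and method are illustrative, not the ones added in the commit:

import org.testng.annotations.Test;

public class AlfrescoLogsTestsSketch
{
    // Tests in this group can be excluded via TestNG group filtering
    // (e.g. an <exclude> entry in the suite XML or the -groups option).
    @Test(groups = { "alfresco-logs" })
    public void logFileContainsNoUnexpectedErrors()
    {
        // test body omitted in this sketch
    }
}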
Travis CI User
8f141aed1e [maven-release-plugin][skip ci] prepare for next development iteration 2022-05-04 11:22:08 +00:00
Travis CI User
2a734c5a78 [maven-release-plugin][skip ci] prepare release 23.1.0.25 2022-05-04 11:22:05 +00:00
evasques
a7cceff6bf MNT-22968 - Bump Freemarker (#1093) 2022-05-04 10:39:33 +01:00
Alfresco CI User
7cafcb6a6b [force] Force release for 2022-05-01. 2022-05-01 00:03:58 +00:00
Travis CI User
652e10e5a3 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-24 00:10:33 +00:00
Travis CI User
ea7a1631c2 [maven-release-plugin][skip ci] prepare release 23.1.0.24 2022-04-24 00:10:31 +00:00
Alfresco CI User
e18aab83c5 [force] Force release for 2022-04-24. 2022-04-24 00:03:07 +00:00
Travis CI User
d6dce92351 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-22 13:06:28 +00:00
Travis CI User
5c31774753 [maven-release-plugin][skip ci] prepare release 23.1.0.23 2022-04-22 13:06:26 +00:00
alandavis
e331e4bd08 ACS-2768 tidy up start-compose.sh 2022-04-22 13:21:44 +01:00
Travis CI User
c9d27e55d6 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-21 17:28:10 +00:00
Travis CI User
d647c96ae4 [maven-release-plugin][skip ci] prepare release 23.1.0.22 2022-04-21 17:28:03 +00:00
Kacper Magdziarz
5a6f9a60f3 ACS-2768 Fix of Automate update of t-router and t-engine references in repositories (#1081) 2022-04-21 17:47:50 +01:00
Travis CI User
ce2c0e2ac2 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-21 10:21:11 +00:00
Travis CI User
dfa9e754d5 [maven-release-plugin][skip ci] prepare release 23.1.0.21 2022-04-21 10:21:07 +00:00
Kacper Magdziarz
860065cf6c ACS-2768 Automate update of t-router and t-engine references in repositories (#1067)
* ACS-2768 reduce the number of places that need updating for t-router and t-engine references in repositories
replace the alfresco-transform-model property with an alfresco-transform-core property

Co-authored-by: alandavis <alan.davis@alfresco.com>
2022-04-21 11:11:43 +02:00
Travis CI User
2d7f8fba87 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-20 13:14:27 +00:00
Travis CI User
c2c4171756 [maven-release-plugin][skip ci] prepare release 23.1.0.20 2022-04-20 13:14:24 +00:00
Marcin Strankowski
af884cbb0e Fix/acs 2851 intermittent integration full tests bulk 2 (#1074)
* ACS-2851: Reverting commented out unstable test, as the intermittent failure seems unreproducible
2022-04-20 14:39:30 +02:00
Travis CI User
fb0be16155 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-20 09:57:27 +00:00
Travis CI User
f9d95136b2 [maven-release-plugin][skip ci] prepare release 23.1.0.19 2022-04-20 09:57:25 +00:00
Damian Ujma
26453c4112 ACS-2674 - fix/ACS-2674_delete_existing_rendition (#1073)
* ACS-2674 Preventively delete an existing rendition of the file

* ACS-2674 Correct the comment
2022-04-20 11:03:27 +02:00
Travis CI User
cb26098e8b [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-17 00:08:03 +00:00
Travis CI User
6766bfcfdf [maven-release-plugin][skip ci] prepare release 23.1.0.18 2022-04-17 00:08:01 +00:00
Alfresco CI User
48df1e25ed [force] Force release for 2022-04-17. 2022-04-17 00:03:05 +00:00
Travis CI User
52918e4ff5 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-13 21:45:40 +00:00
Travis CI User
40d70e797c [maven-release-plugin][skip ci] prepare release 23.1.0.17 2022-04-13 21:45:37 +00:00
alandavis
6011868759 Pick up t-core 2.6.0-A3 2022-04-13 19:45:13 +01:00
Travis CI User
79ad480ddb [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-13 09:44:54 +00:00
Travis CI User
c12d8f98be [maven-release-plugin][skip ci] prepare release 23.1.0.16 2022-04-13 09:44:52 +00:00
Damian Ujma
d2faadac57 ACS-2794 Use SearchEngineResultSet interface rather than concrete implementation (#1063) 2022-04-13 11:10:22 +02:00
Travis CI User
7f1d28a417 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-12 14:14:52 +00:00
Travis CI User
fab02e5484 [maven-release-plugin][skip ci] prepare release 23.1.0.15 2022-04-12 14:14:48 +00:00
Marcin Strankowski
5274687ec1 Feature/acs 2787 split travis jobs to reduce max time (#1062)
* ACS-2787: Splitting AGS 01 job to 2 parts (01, 04)

For MySQL the job executes for 31-35 minutes; split this job into 2 parts to reduce the max execution time for community-repo

* ACS-2787: Intermittent test failure commented out, issue created ACS-2850

* ACS-2787: Commenting out intermittent failure, issue created ACS-2851
2022-04-12 15:40:08 +02:00
Travis CI User
b52e67b7df [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-12 13:27:02 +00:00
Travis CI User
77f414dbf1 [maven-release-plugin][skip ci] prepare release 23.1.0.14 2022-04-12 13:26:59 +00:00
dependabot[bot]
c45ce63bd2 Bump docker-maven-plugin from 0.38.0 to 0.39.1 (#987)
Bumps [docker-maven-plugin](https://github.com/fabric8io/docker-maven-plugin) from 0.38.0 to 0.39.1.
- [Release notes](https://github.com/fabric8io/docker-maven-plugin/releases)
- [Changelog](https://github.com/fabric8io/docker-maven-plugin/blob/master/doc/changelog.md)
- [Commits](https://github.com/fabric8io/docker-maven-plugin/compare/v0.38.0...v0.39.1)

---
updated-dependencies:
- dependency-name: io.fabric8:docker-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-12 14:38:13 +02:00
Travis CI User
0a784a0674 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-10 00:08:08 +00:00
Travis CI User
1da1506160 [maven-release-plugin][skip ci] prepare release 23.1.0.13 2022-04-10 00:08:05 +00:00
Alfresco CI User
79dd212428 [force] Force release for 2022-04-10. 2022-04-10 00:02:57 +00:00
Travis CI User
fd859a4191 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-08 10:41:14 +00:00
Travis CI User
e61f6df323 [maven-release-plugin][skip ci] prepare release 23.1.0.12 2022-04-08 10:41:09 +00:00
Piotr Żurek
64998b5c53 ACS-2744 Private action validation logic (#1051) 2022-04-08 11:56:31 +02:00
Travis CI User
784c9d47b3 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-07 14:00:59 +00:00
Travis CI User
b5617c3bd9 [maven-release-plugin][skip ci] prepare release 23.1.0.11 2022-04-07 14:00:56 +00:00
Vítor Moreira
53777cd5b9 MNT-22946: bump spring version to 5.3.18 (#1054) 2022-04-07 14:05:07 +01:00
Travis CI User
ddb8ad16ac [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-07 11:00:32 +00:00
Travis CI User
a3ed766957 [maven-release-plugin][skip ci] prepare release 23.1.0.10 2022-04-07 11:00:30 +00:00
dependabot[bot]
22a0343c41 Bump dependency.webscripts.version from 8.28 to 8.29 (#1052) 2022-04-07 09:58:16 +00:00
Travis CI User
43fb3cfdc5 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-07 08:32:04 +00:00
Travis CI User
8accceb08e [maven-release-plugin][skip ci] prepare release 23.1.0.9 2022-04-07 08:32:01 +00:00
Marcin Strankowski
71eab3e347 Feature/acs 2787 improve build speed of community repo (#1049)
* ACS-2787: Improving cache management for community repo
2022-04-07 09:33:32 +02:00
alandavis
8d378fc340 ACS-2765 t-model is now part of t-core 2.6.0-A2 2022-04-07 08:03:59 +01:00
Travis CI User
73a8ac61ed [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-06 11:50:05 +00:00
Travis CI User
67d3159650 [maven-release-plugin][skip ci] prepare release 23.1.0.8 2022-04-06 11:50:02 +00:00
alandavis
ec8a760122 ACS-2765 Upgrade to t-core 2.5.7 2022-04-06 12:04:39 +01:00
Travis CI User
3d0ceca6da [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-05 15:32:17 +00:00
Travis CI User
8c25d1f7e6 [maven-release-plugin][skip ci] prepare release 23.1.0.7 2022-04-05 15:32:14 +00:00
Travis CI User
1e0972eaa1 [maven-release-plugin][skip ci] prepare for next development iteration 2022-04-03 00:13:20 +00:00
Travis CI User
616c666114 [maven-release-plugin][skip ci] prepare release 23.1.0.6 2022-04-03 00:13:17 +00:00
Alfresco CI User
8520548e72 [force] Force release for 2022-04-03. 2022-04-03 00:03:10 +00:00
Travis CI User
63f7f07465 [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-28 09:04:17 +00:00
Travis CI User
35a3ff8032 [maven-release-plugin][skip ci] prepare release 23.1.0.5 2022-03-28 09:04:14 +00:00
Kacper Magdziarz
695bf90891 [ACS-962] - Attempt to fix concurrency failing in test. (#1019)
ACS-962 Extract waitForMethodToFinish for TestHelper to avoid code duplication.
2022-03-28 10:18:16 +02:00
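As a hedged sketch of what a shared waitForMethodToFinish-style helper in a TestHelper class could look like - the signature and polling interval are assumptions for illustration, not the actual TAS utility:

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public final class TestHelperSketch
{
    // Poll a condition until it holds or the timeout elapses, so individual tests
    // no longer duplicate their own sleep/retry loops.
    public static void waitForMethodToFinish(BooleanSupplier finished, long timeout, TimeUnit unit)
            throws InterruptedException
    {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        while (!finished.getAsBoolean())
        {
            if (System.nanoTime() >= deadline)
            {
                throw new AssertionError("Condition not met within " + timeout + " " + unit);
            }
            Thread.sleep(250);
        }
    }
}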
Travis CI User
d4dcc4fe2c [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-27 00:12:34 +00:00
Travis CI User
ee44b6afa2 [maven-release-plugin][skip ci] prepare release 23.1.0.4 2022-03-27 00:12:31 +00:00
Alfresco CI User
28b580b9fc [force] Force release for 2022-03-27. 2022-03-27 00:02:43 +00:00
Travis CI User
7dc7a743f5 [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-24 12:02:47 +00:00
Travis CI User
4bc92df38e [maven-release-plugin][skip ci] prepare release 23.1.0.3 2022-03-24 12:02:43 +00:00
Alfresco CI User
048df99d91 [force] Force release for 2022-03-24. 2022-03-24 11:52:55 +00:00
Travis CI User
b477d122f3 [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-22 12:28:22 +00:00
Travis CI User
0cc2fe46cf [maven-release-plugin][skip ci] prepare release 23.1.0.2 2022-03-22 12:28:19 +00:00
Tom Page
671e1934bd ACS-2735 Update hardcoded version number to 23.1.0. 2022-03-22 11:42:09 +00:00
Tom Page
d0452aaddb Revert "Revert "ACS-2608 Support force directive. (#992)""
This reverts commit 464db778fa.
2022-03-22 10:45:16 +00:00
Tom Page
464db778fa Revert "ACS-2608 Support force directive. (#992)"
This reverts commit 905e813aa4.
2022-03-22 10:44:55 +00:00
Tom Page
905e813aa4 ACS-2608 Support force directive. (#992)
* ACS-2608 Support force directive.

* ACS-2608 Rename bash variable to match convention.

* ACS-2608 Use 14.121.x for branch.

* [force 7.2.0-A27.1] ACS-2608 Test force release.

* [force 7.2.0-A27.1] ACS-2608 Fix regex for force.

* [maven-release-plugin][skip ci] prepare release 14.121.1

* [maven-release-plugin][skip ci] prepare for next development iteration

* [force 7.2.0-A27.2] ACS-2608 Test force release.

* [maven-release-plugin][skip ci] prepare release 14.121.2

* [maven-release-plugin][skip ci] prepare for next development iteration

* [force 7.2.0-A27.3] ACS-2608 Test force release.

* [maven-release-plugin][skip ci] prepare release 14.121.3

* [maven-release-plugin][skip ci] prepare for next development iteration

* [force 7.2.0-A27.4] ACS-2608 Test force release.

* [maven-release-plugin][skip ci] prepare release 14.121.4

* [maven-release-plugin][skip ci] prepare for next development iteration

* ACS-2608 Revert change to version.

Co-authored-by: Travis CI User <build@alfresco.com>
2022-03-22 10:35:09 +00:00
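The force directive above is detected by a regular expression on the commit message (it appears later in the .travis.yml diff as /\[force[^\]]*\]/). A minimal Java check of the same pattern, for illustration only:

import java.util.regex.Pattern;

public class ForceDirectiveSketch
{
    private static final Pattern FORCE = Pattern.compile("\\[force[^\\]]*\\]");

    public static boolean isForceRelease(String commitMessage)
    {
        // Matches "[force]" as well as "[force 7.2.0-A27.1] ACS-2608 Test force release."
        return FORCE.matcher(commitMessage).find();
    }
}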
alandavis
b55ba47c5e amp.min.version now has a 1 as its 1st digit with the new Hyland version number scheme such as 23.1 2022-03-21 17:11:03 +00:00
alandavis
0607bb1485 Set acs.version.major/minor versions 23.1.0 2022-03-21 17:07:53 +00:00
Travis CI User
f7c8298308 [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-21 13:23:25 +00:00
Travis CI User
86de88ebaa [maven-release-plugin][skip ci] prepare release 23.1.0.1 2022-03-21 13:23:22 +00:00
alandavis
76ba0a38e5 Switch master branch ready for 23.1.0 development 2022-03-21 12:38:08 +00:00
Travis CI User
3ad642f388 [maven-release-plugin][skip ci] prepare for next development iteration 2022-03-21 11:30:57 +00:00
Travis CI User
e113eb3561 [maven-release-plugin][skip ci] prepare release 14.146 2022-03-21 11:30:55 +00:00
alandavis
1dc1852a69 Increment version.schema to next multiple of 1000. i.e. 17000 for the next major/minor version 2022-03-21 10:18:29 +00:00
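The "next multiple of 1000" bump above is simple integer arithmetic; a short Java sketch, with the current schema value assumed purely for illustration:

public class SchemaBumpSketch
{
    public static void main(String[] args)
    {
        int currentSchema = 16100;                              // assumed 16xxx value
        int nextSchema = ((currentSchema / 1000) + 1) * 1000;   // -> 17000, as in the commit
        System.out.println(nextSchema);
    }
}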
89 changed files with 3603 additions and 1487 deletions

.travis.yml

@@ -26,13 +26,16 @@ branches:
env:
global:
- TRANSFORMERS_TAG=$(mvn help:evaluate -Dexpression=dependency.alfresco-transform-core.version -q -DforceStdout)
- TAS_SCRIPTS=../alfresco-community-repo/packaging/tests/scripts
- TAS_ENVIRONMENT=./packaging/tests/environment
- LOG_WARN="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
stages:
- name: prepare
if: commit_message !~ /\[skip tests\]/ AND commit_message !~ /\[force[^\]]*\]/
- name: test
if: commit_message !~ /\[skip tests\]/
if: commit_message !~ /\[skip tests\]/ AND commit_message !~ /\[force[^\]]*\]/
- name: release
if: commit_message !~ /\[no release\]/ AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request AND fork = false
- name: update_downstream
@@ -43,7 +46,13 @@ install: travis_retry travis_wait 40 bash scripts/travis/build.sh
jobs:
include:
- name: "Prepare"
stage: prepare
install: skip
script: travis_retry travis_wait 80 bash scripts/travis/prepare.sh
- name: "Source Clear Scan (SCA)"
stage: test
if: branch = master OR branch =~ /release\/.*/
# Run Veracode
install: skip
@@ -51,43 +60,49 @@ jobs:
- name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
if: commit_message !~ /\[skip repo\]/
install: skip
script:
- travis_retry mvn -B test -pl core,data-model
- travis_retry mvn -B test -pl "repository,mmt" "-Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite"
- travis_retry mvn -B test -pl core,data-model -am -DfailIfNoTests=false
- travis_retry mvn -B test -pl "repository,mmt" -am "-Dtest=AllUnitTestsSuite,AllMmtUnitTestSuite" -DfailIfNoTests=false
- name: "Repository - AppContext01TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext01TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext02TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext02TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext03TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext03TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext04TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext04TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext05TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
@@ -95,142 +110,161 @@ jobs:
- cp repository/src/test/resources/realms/alfresco-realm.json "${HOME}/tmp"
- export HOST_IP=$(hostname -I | cut -f1 -d' ')
- docker run -d -e KEYCLOAK_USER=admin -e KEYCLOAK_PASSWORD=admin -e DB_VENDOR=h2 -p 8999:8080 -e KEYCLOAK_IMPORT=/tmp/alfresco-realm.json -v $HOME/tmp/alfresco-realm.json:/tmp/alfresco-realm.json alfresco/alfresco-identity-service:1.2
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext05TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "-Didentity-service.auth-server-url=http://${HOST_IP}:8999/auth"
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext05TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "-Didentity-service.auth-server-url=http://${HOST_IP}:8999/auth"
- name: "Repository - AppContext06TestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext06TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContext06TestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContextExtraTestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AppContextExtraTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - MiscContextTestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
script: travis_wait 20 mvn -B test -pl repository -Dtest=MiscContextTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=MiscContextTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - SearchTestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=SearchTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco -Dindex.subsystem.name=solr6
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=SearchTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco -Dindex.subsystem.name=solr6
- name: "Repository - MariaDB 10.2.18 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.2.18 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
- name: "Repository - MariaDB 10.4 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.4 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
- name: "Repository - MariaDB 10.5 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.5 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
- name: "Repository - MariaDB 10.6 tests"
# We run tests on the latest version of MariaDB on pull requests plus the normal master and release branches - ignored on feature branches
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ ) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.6 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
- name: "Repository - MySQL 5.7.23 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:5.7.23 --transaction-isolation='READ-COMMITTED' --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - MySQL 8 tests"
# We run tests on the latest version of MySQL on pull requests plus the normal master and release branches - ignored on feature branches
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ ) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
install: skip
before_script:
- docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:8 --transaction-isolation='READ-COMMITTED'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 10.9 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:10.9 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 11.7 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 11.12 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.12 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 12.4 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:12.4 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 12.7 tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:12.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 13.1 tests"
# We only run DB tests on the latest version of PostgreSQL on feature branches
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - PostgreSQL 13.3 tests"
# We only run DB tests on the latest version of PostgreSQL on feature branches
if: commit_message !~ /\[skip db\]/ OR commit_message =~ /\[latest db\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - Messaging tests"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
script: travis_wait 20 mvn -B test -pl repository -Dtest=CamelRoutesTest,CamelComponentsTest
script: travis_wait 20 mvn -B test -pl repository -am -Dtest=CamelRoutesTest,CamelComponentsTest -DfailIfNoTests=false
- name: "Remote-api - AppContext01TestSuite"
if: commit_message !~ /\[skip repo\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
@@ -238,30 +272,34 @@ jobs:
- name: "Remote-api - AppContext02TestSuite"
if: commit_message !~ /\[skip repo\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContext03TestSuite"
if: commit_message !~ /\[skip repo\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContext04TestSuite"
if: commit_message !~ /\[skip repo\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:${TRANSFORMERS_TAG}
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContextExtraTestSuite"
if: commit_message !~ /\[skip repo\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
@@ -270,99 +308,126 @@ jobs:
- name: "REST API TAS tests part1"
# TAS tests are generally skipped on feature branches as they will be repeated on the enterprise repo or community packaging builds
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part1 -Denvironment=default -DrunBugs=false
script: travis_wait 60 mvn -B verify -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part1 -Denvironment=default -DrunBugs=false
- name: "REST API TAS tests part2"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part2 -Denvironment=default -DrunBugs=false
script: travis_wait 60 mvn -B verify -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part2 -Denvironment=default -DrunBugs=false
- name: "REST API TAS tests part3"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part3 -Denvironment=default -DrunBugs=false
script: travis_wait 60 mvn -B verify -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part3 -Denvironment=default -DrunBugs=false
- name: "CMIS TAS tests - BROWSER binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-browser -Denvironment=default -DrunBugs=false
script: travis_wait 40 mvn -B verify -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-browser -Denvironment=default -DrunBugs=false
- name: "CMIS TAS tests - ATOM binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-atom -Denvironment=default -DrunBugs=false
script: travis_wait 40 mvn -B verify -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-atom -Denvironment=default -DrunBugs=false
- name: "CMIS TAS tests - WEBSERVICES binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-webservices -Denvironment=default -DrunBugs=false
script: travis_wait 40 mvn -B verify -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-webservices -Denvironment=default -DrunBugs=false
- name: "Email TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 30 mvn -B install -f packaging/tests/tas-email/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
script: travis_wait 30 mvn -B verify -f packaging/tests/tas-email/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
- name: "WebDAV TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 30 mvn -B install -f packaging/tests/tas-webdav/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
script: travis_wait 30 mvn -B verify -f packaging/tests/tas-webdav/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
- name: "Integration TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
script: travis_wait 30 mvn -B install -f packaging/tests/tas-integration/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
script: travis_wait 30 mvn -B verify -f packaging/tests/tas-integration/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false
- name: "Share Services - ShareServicesTestSuite"
if: commit_message !~ /\[skip repo\]/
install: skip
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
script: travis_wait 20 mvn -B test -pl :alfresco-share-services -Dtest=ShareServicesTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
script: travis_wait 20 mvn -B test -pl :alfresco-share-services -am -Dtest=ShareServicesTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "AGS Unit & Integration Tests 01 (PostgreSQL)"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-postgres -PagsAllTestSuitePt1 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Integration Tests 02 (PostgreSQL)"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-postgres -PagsAllTestSuitePt2 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Integration Tests 03 (PostgreSQL)"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-postgres -PagsAllTestSuitePt3 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Unit & Integration Tests 04 (PostgreSQL)"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-postgres -PagsAllTestSuitePt4 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Unit & Integration Tests 01 (MySQL) "
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags on MySQL\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-mysql -PagsAllTestSuitePt1 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Integration Tests 02 (MySQL) "
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags on MySQL\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-mysql -PagsAllTestSuitePt2 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Integration Tests 03 (MySQL) "
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags on MySQL\]/
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-mysql -PagsAllTestSuitePt3 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Unit & Integration Tests 04 (MySQL) "
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags on MySQL\]/
install: travis_retry travis_wait 40 env REQUIRES_INSTALLED_ARTIFACTS=true bash scripts/travis/build.sh
script: travis_retry travis_wait 80 mvn -B verify -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Pags -Pstart-mysql -PagsAllTestSuitePt4 -f amps/ags/pom.xml ${LOG_WARN}
- name: "AGS Community Rest API Tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip ags\]/) OR commit_message =~ /\[ags\]/
install: travis_retry travis_wait 40 env REQUIRES_LOCAL_IMAGES=true bash scripts/travis/build.sh
addons:
artifacts:
paths:
@@ -373,11 +438,12 @@ jobs:
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ./amps/ags/rm-community/rm-community-repo/docker-compose.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8080/alfresco"
script: travis_wait 40 mvn -B test -pl :alfresco-governance-services-automation-community-rest-api -Dskip.automationtests=false -Pags
script: travis_wait 40 mvn -B test -pl :alfresco-governance-services-automation-community-rest-api -am -DfailIfNoTests=false -Dskip.automationtests=false -Pags
after_script: bash amps/ags/travis/scripts/getLogs.sh
- name: "Push to Nexus"
stage: release
install: skip
before_script: bash scripts/travis/verify_release_tag.sh
script: travis_wait 40 bash scripts/travis/maven_release.sh


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<build>


@@ -26,8 +26,6 @@
*/
package org.alfresco.rest.v0;
import static org.testng.AssertJUnit.assertTrue;
import java.text.MessageFormat;
import org.alfresco.rest.core.v0.BaseAPI;
@@ -38,7 +36,9 @@ import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import static org.testng.AssertJUnit.assertTrue;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
/**
* Methods to make API requests using v0 API on Records Management Custom Model Reference Definitions
*
@@ -57,6 +57,8 @@ public class CustomDefinitionsAPI extends BaseAPI
* create reference endpoint
*/
private static final String CREATE_RELATIONSHIP_API_ENDPOINT = "{0}node/{1}/customreferences";
private static final String GET_RELATIONSHIP_API_ENDPOINT = "{0}node/{1}/relationships";
private static final String DELETE_RELATIONSHIP_API_ENDPOINT = "{0}node/{1}/targetnode/{2}/uniqueName/{3}";
/**
* logger
@@ -141,4 +143,48 @@ public class CustomDefinitionsAPI extends BaseAPI
assertTrue("Creating relationship from " + recordNodeIdFrom + " to " + recordNodeIdTo + " failed.", success);
}
public void createRelationship(
String adminUser,
String adminPassword,
int expectedStatus,
String recordNodeIdFrom,
String recordNodeIdTo,
CustomDefinitions relationshipType) {
//create the request body
JSONObject requestParams = new JSONObject();
requestParams.put("toNode", NODE_REF_WORKSPACE_SPACES_STORE + recordNodeIdTo);
requestParams.put("refId", getCustomReferenceId(adminUser, adminPassword, relationshipType
.getDefinition()));
//send the API request to create the relationship
JSONObject setRelationshipStatus = doPostRequest(adminUser, adminPassword, requestParams,
MessageFormat.format(CREATE_RELATIONSHIP_API_ENDPOINT, "{0}", NODE_PREFIX + recordNodeIdFrom));
//check the response
assertEquals("POST request for createRelationship was not successful.", expectedStatus, setRelationshipStatus.getJSONObject("status").get("code"));
}
public JSONObject getRelationshipDetails(
String adminUser,
String adminPassword,
String nodeRef) {
//send the API request to get the relationship details
JSONObject relationshipDetails = doGetRequest(adminUser, adminPassword,
MessageFormat.format(GET_RELATIONSHIP_API_ENDPOINT, "{0}", NODE_PREFIX + nodeRef));
//check the response
assertNotNull("The Relationship detail is not found for the Noderef " + nodeRef, relationshipDetails);
return relationshipDetails;
}
public void deleteRelationship(
String adminUser,
String adminPassword,
String recordNodeIdFrom,
String recordNodeIdTo,
String relationshipUniqueName) {
//send the API request to delete the relationship
JSONObject setRelationshipStatus = doDeleteRequest(adminUser, adminPassword,
MessageFormat.format(DELETE_RELATIONSHIP_API_ENDPOINT, "{0}", NODE_PREFIX + recordNodeIdFrom,NODE_PREFIX + recordNodeIdTo,relationshipUniqueName));
//check the response
boolean success = (setRelationshipStatus != null) && setRelationshipStatus.getBoolean("success");
assertTrue("Deleting relationship from " + recordNodeIdFrom + " to " + recordNodeIdTo + " failed.", success);
}
}
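For illustration, a minimal usage sketch of the three new v0 helpers above, assuming a test class that extends BaseRMRestTest with CustomDefinitionsAPI autowired (as in the tests further below); the credentials and the two record node ids are placeholders:
// Hypothetical helper method; adminUser/adminPassword and the node ids are placeholders.
private void relationshipRoundTrip(String adminUser, String adminPassword,
                                   String recordNodeIdFrom, String recordNodeIdTo)
{
    // create an ATTACHMENT relationship between the two filed records (the 5-argument variant asserts success)
    customDefinitionsAPI.createRelationship(adminUser, adminPassword, recordNodeIdFrom, recordNodeIdTo,
            CustomDefinitions.ATTACHMENT);
    // read back the relationships defined on the source record
    JSONObject details = customDefinitionsAPI.getRelationshipDetails(adminUser, adminPassword, recordNodeIdFrom);
    // delete the relationship again, using the unique name carried in the details payload
    String uniqueName = details.getJSONObject("data").getJSONArray("items").getJSONObject(0)
            .getJSONObject("node").get("relationshipUniqueName").toString();
    customDefinitionsAPI.deleteRelationship(adminUser, adminPassword, recordNodeIdFrom, recordNodeIdTo, uniqueName);
}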


@@ -73,6 +73,7 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.alfresco.utility.model.TestGroup;
/**
* API tests for declaring a document as a record and filing it immediately to a record folder location within the file plan
@@ -257,7 +258,7 @@ public class DeclareAndFileDocumentAsRecordTests extends BaseRMRestTest
* Then I receive an error indicating that I have attempted to declare and file a document into an invalid record folder
* And the document is not declared as a record
*/
@Test (dataProvider = "invalidDestinationPaths")
@Test (dataProvider = "invalidDestinationPaths",groups = { TestGroup.NOT_SUPPORTED_ON_SINGLE_PIPELINE })
public void declareAndFileToInvalidLocationUsingActionsAPI(String containerPath, String expectedException) throws Exception
{
STEP("Declare document as record with an invalid location parameter value");


@@ -62,6 +62,7 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.alfresco.utility.model.TestGroup;
/**
* API tests for declaring a document version as record and filing to a record folder location within the file plan
@@ -207,7 +208,7 @@ public class FileVersionAsRecordTests extends BaseRMRestTest
* record folder
* And the document is not declared as a version record
*/
@Test (dataProvider = "invalidDestinationPaths")
@Test (dataProvider = "invalidDestinationPaths", groups = { TestGroup.NOT_SUPPORTED_ON_SINGLE_PIPELINE })
public void declareVersionAndFileToInvalidLocationUsingActionsAPI(String containerPath, String expectedException) throws Exception
{
STEP("Declare document as record version with an invalid location parameter value");


@@ -0,0 +1,202 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import static org.alfresco.rest.core.v0.BaseAPI.RM_SITE_ID;
import static org.alfresco.rest.rm.community.base.TestData.HOLD_DESCRIPTION;
import static org.alfresco.rest.rm.community.base.TestData.HOLD_REASON;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import java.util.Collections;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.custom.CustomDefinitions;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.v0.CustomDefinitionsAPI;
import org.alfresco.rest.v0.HoldsAPI;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.rest.v0.RecordCategoriesAPI;
import org.alfresco.test.AlfrescoTest;
import org.apache.commons.lang.StringUtils;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;
import static org.apache.commons.httpclient.HttpStatus.SC_INTERNAL_SERVER_ERROR;
import static org.apache.commons.httpclient.HttpStatus.SC_OK;
/**
* Add Relationship tests
* @author Kavit Shah
*/
public class AddRelationshipTests extends BaseRMRestTest
{
private final String TEST_PREFIX = generateTestPrefix(AddRelationshipTests.class);
private final String CATEGORY = TEST_PREFIX + "category";
private final String HOLD1 = TEST_PREFIX + "hold1";
private final String FOLDER = TEST_PREFIX + "RM_2709_1814_FOLDER";
private final String RECORD1 = TEST_PREFIX + "RM_2709_1814_RECORD_ONE";
private final String RECORD2 = TEST_PREFIX + "RM_1814_RECORD_TWO";
private String hold1NodeRef;
@Autowired
private HoldsAPI holdsAPI;
@Autowired
private RecordsAPI recordsAPI;
@Autowired
private CustomDefinitionsAPI customDefinitionsAPI;
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private RecordCategoriesAPI recordCategoriesAPI;
@Test (priority = 1)
@AlfrescoTest (jira = "RM-1814")
public void addRelationshipToHoldRecord()
{
String CATEGORY_RELATIONSHIP = CATEGORY + "To Hold";
//create RM site
createRMSiteIfNotExists();
//create record category, record folder and records
RecordCategory recordCategory = createCategoryIfDoesNotExist(CATEGORY_RELATIONSHIP);
RecordCategoryChild recordCategoryChild = createRecordFolderInCategory(FOLDER, recordCategory);
createRecordItems(recordCategoryChild, RECORD1);
Record record2 = createRecordItems(recordCategoryChild, RECORD2);
//create Hold
hold1NodeRef = holdsAPI.createHoldAndGetNodeRef(getAdminUser().getUsername(),
getAdminUser().getPassword(), HOLD1, HOLD_REASON, HOLD_DESCRIPTION);
//add RECORD2 to holds
holdsAPI.addItemsToHolds(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
SC_OK, Collections.singletonList(record2.getId()),
Collections.singletonList(hold1NodeRef));
// get records nodeRefs
String elRecordFullName1 = recordsAPI.getRecordFullName(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), FOLDER, RECORD1);
String elRecordNodeRef1 = recordsAPI.getRecordNodeRef(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), elRecordFullName1, "/" + CATEGORY_RELATIONSHIP + "/" + FOLDER);
String elRecordFullName2 = recordsAPI.getRecordFullName(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), FOLDER, RECORD2);
String elRecordNodeRef2 = recordsAPI.getRecordNodeRef(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), elRecordFullName2, "/" + CATEGORY_RELATIONSHIP + "/" + FOLDER);
// create Relationship
customDefinitionsAPI.createRelationship(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
SC_INTERNAL_SERVER_ERROR,
formatNodeRef(elRecordNodeRef1),
formatNodeRef(elRecordNodeRef2),
CustomDefinitions.ATTACHMENT);
//delete preconditions
deletePrecondition();
}
@Test (priority = 2)
@AlfrescoTest (jira = "RM-1874")
public void deleteRelationship()
{
String CATEGORY_RELATIONSHIP = CATEGORY + "deleteRelationship";
// create RM site
createRMSiteIfNotExists();
// create record category, record folder and records
RecordCategory recordCategory = createCategoryIfDoesNotExist(CATEGORY_RELATIONSHIP);
RecordCategoryChild recordCategoryChild = createRecordFolderInCategory(FOLDER, recordCategory);
createRecordItems(recordCategoryChild, RECORD1);
createRecordItems(recordCategoryChild, RECORD2);
// Add Relationship
String elRecordFullName1 = recordsAPI.getRecordFullName(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), FOLDER, RECORD1);
String elRecordNodeRef1 = recordsAPI.getRecordNodeRef(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), elRecordFullName1, "/" + CATEGORY_RELATIONSHIP + "/" + FOLDER);
String elRecordFullName2 = recordsAPI.getRecordFullName(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), FOLDER, RECORD2);
String elRecordNodeRef2 = recordsAPI.getRecordNodeRef(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), elRecordFullName2, "/" + CATEGORY_RELATIONSHIP + "/" + FOLDER);
customDefinitionsAPI.createRelationship(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
formatNodeRef(elRecordNodeRef1),
formatNodeRef(elRecordNodeRef2),
CustomDefinitions.ATTACHMENT);
// Get RelationshipDetails
JSONObject relationshipDetails = customDefinitionsAPI.getRelationshipDetails(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
formatNodeRef(elRecordNodeRef1));
// Delete RelationshipDetails
customDefinitionsAPI.deleteRelationship(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
formatNodeRef(elRecordNodeRef1),
formatNodeRef(elRecordNodeRef2),
relationshipUniqueName(relationshipDetails));
// delete category
tearDown(CATEGORY_RELATIONSHIP);
}
private void deletePrecondition()
{
holdsAPI.deleteHold(getAdminUser(), hold1NodeRef);
}
private Record createRecordItems(RecordCategoryChild recordCategoryChild, String record) {
return createElectronicRecord(recordCategoryChild.getId(), record);
}
private RecordCategory createCategoryIfDoesNotExist(String CATEGORY_ALL) {
return createRootCategory(getDataUser().usingAdmin().getAdminUser(), CATEGORY_ALL);
}
private RecordCategoryChild createRecordFolderInCategory(String FOLDER_SEARCH, RecordCategory recordCategory) {
return createFolder(getDataUser().usingAdmin().getAdminUser(), recordCategory.getId(), FOLDER_SEARCH);
}
private String formatNodeRef(String nodeRef) {
return StringUtils.remove(nodeRef,"workspace://SpacesStore/");
}
private void tearDown(String category) {
rmRolesAndActionsAPI.deleteAllItemsInContainer(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), RM_SITE_ID, FOLDER);
rmRolesAndActionsAPI.deleteAllItemsInContainer(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), RM_SITE_ID, category);
recordCategoriesAPI.deleteCategory(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), category);
}
private String relationshipUniqueName(JSONObject relationshipDetails) {
return relationshipDetails.getJSONObject("data").getJSONArray("items").getJSONObject(0).getJSONObject("node")
.get("relationshipUniqueName").toString();
}
}
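The relationshipUniqueName helper above assumes the v0 relationships response is shaped roughly as in the following self-contained sketch; the payload is illustrative, not captured from a live server:
import org.json.JSONObject;

public class RelationshipPayloadSketch
{
    public static void main(String[] args)
    {
        // Illustrative payload only: the values are made up, the structure mirrors what the helper expects.
        String payload = "{\"data\":{\"items\":[{\"node\":{\"relationshipUniqueName\":\"attachment__abc123\"}}]}}";
        JSONObject relationshipDetails = new JSONObject(payload);
        String uniqueName = relationshipDetails.getJSONObject("data").getJSONArray("items")
                .getJSONObject(0).getJSONObject("node").get("relationshipUniqueName").toString();
        System.out.println(uniqueName); // prints attachment__abc123
    }
}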


@@ -0,0 +1,148 @@
/*-
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import static java.util.Arrays.asList;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.rest.rm.community.utils.CoreUtil.createBodyForMoveCopy;
import static org.alfresco.rest.rm.community.utils.CoreUtil.toContentModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createElectronicRecordModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.getFile;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.IMAGE_FILE;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createRecordModel;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.CREATED;
import static org.springframework.http.HttpStatus.OK;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.requests.gscore.api.RecordFolderAPI;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.model.UserModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
/**
* This class contains the REST API tests for creating, updating and
* moving electronic records
*
* @author Shishuraj Bisht
*/
public class CreateElectronicRecordsTests extends BaseRMRestTest {
private RecordCategory rootCategory;
private UserModel updateUser;
/**
* data prep services
*/
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
private final String TEST_PREFIX = generateTestPrefix(CreateElectronicRecordsTests.class);
private final String RM_ADMIN = TEST_PREFIX + "rm_admin";
@BeforeClass (alwaysRun = true)
public void preConditions()
{
STEP("Create RM Site");
createRMSiteIfNotExists();
STEP("Create RM Admin user");
rmRolesAndActionsAPI.createUserAndAssignToRole(getAdminUser().getUsername(), getAdminUser().getPassword(), RM_ADMIN,
getAdminUser().getPassword(),
"Administrator");
STEP("Create root level category");
rootCategory = createRootCategory(getRandomName("Category"));
STEP("Create the record folder1 inside the rootCategory");
String recordFolder1 = createCategoryFolderInFilePlan().getId();
}
/**
* Test v0 methods to create and get electronic records.
*/
@Test
@AlfrescoTest (jira = "RM-2768")
public void createElectronicRecordTest() throws Exception {
//create electronic record in record folder
String recordFolder1 = createRecordFolder(rootCategory.getId(), getRandomName("recFolder")).getId();
RecordFolderAPI recordFolderAPI = getRestAPIFactory().getRecordFolderAPI();
Record electronicRecord = recordFolderAPI.createRecord(createElectronicRecordModel(), recordFolder1, getFile(IMAGE_FILE));
assertStatusCode(CREATED);
STEP("Check the electronic record has been created");
assertStatusCode(CREATED);
// Get recordsAPI instance initialised to updateUser
org.alfresco.rest.rm.community.requests.gscore.api.RecordsAPI recordsAPI = getRestAPIFactory().getRecordsAPI(updateUser);
for (Record record: asList(electronicRecord)) {
recordsAPI.getRecord(record.getId());
assertStatusCode(OK);
// Generate update metadata
String newName = getModifiedPropertyValue(record.getName());
String newTitle = getModifiedPropertyValue(record.getProperties().getTitle());
String newDescription = getModifiedPropertyValue(record.getProperties().getDescription());
// Update record
recordsAPI.updateRecord(createRecordModel(newName, newDescription, newTitle), record.getId());
assertStatusCode(OK);
}
// move the record from one folder1 to folder2
STEP("Create the record folder2 inside the rootCategory");
String recordFolder2 = createCategoryFolderInFilePlan().getId();
STEP("Move record from folder1 to folder2");
RestNodeModel electronicDocRestNodeModel = getRestAPIFactory()
.getNodeAPI(toContentModel(electronicRecord.getId()))
.move(createBodyForMoveCopy(recordFolder2));
assertStatusCode(OK);
}
private String getModifiedPropertyValue(String originalValue) {
/* to be used to append to modifications */
String MODIFIED_PREFIX = "modified_";
return MODIFIED_PREFIX + originalValue;
}
@AfterClass (alwaysRun = true)
public void deletePreConditions() {
STEP("Delete the created rootCategory along with corresponding record folders/records present in it");
getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory.getId());
}
}


@@ -0,0 +1,145 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import org.alfresco.dataprep.CMISUtil;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.constants.UserRole;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FolderModel;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.UNFILED_RECORDS_CONTAINER_ALIAS;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.CREATED;
import static org.testng.Assert.assertTrue;
/**
* This class contains the tests for creating documents, declaring them as records
* and hiding them using a site collaborator.
* The RM admin user then verifies that the documents can still be accessed using the REST API
*
* @author Kavit Shah
*/
public class DeclareInPlaceRecordTests extends BaseRMRestTest {
private final String TEST_PREFIX = generateTestPrefix(DeclareInPlaceRecordTests.class);
private final String RM_ADMIN = TEST_PREFIX + "rm_admin";
private UserModel testUser;
private UserModel RmAdminUser;
private SiteModel testSite;
private FolderModel testFolder;
/**
* data prep services
*/
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private RecordsAPI recordsAPI;
@BeforeClass(alwaysRun = true)
public void preConditions() {
STEP("Create RM Site");
createRMSiteIfNotExists();
STEP("Create RM Admin user");
rmRolesAndActionsAPI.createUserAndAssignToRole(getAdminUser().getUsername(), getAdminUser().getPassword(), RM_ADMIN,
getAdminUser().getPassword(),
"Administrator");
RmAdminUser = new UserModel(RM_ADMIN,getAdminUser().getPassword());
STEP("Create collab_user user");
testUser = getDataUser().createRandomTestUser();
testSite = dataSite.usingAdmin().createPublicRandomSite();
// invite collab_user to Collaboration site with Contributor role
getDataUser().addUserToSite(testUser, testSite, UserRole.SiteContributor);
testFolder = dataContent.usingSite(testSite).usingUser(testUser).createFolder();
}
@Test
@AlfrescoTest(jira = "RM-2366")
public void declareInplaceRecord() {
// Upload document in a folder in a collaboration site
FileModel uploadedDocHidden = dataContent.usingSite(testSite)
.usingUser(testUser)
.usingResource(testFolder)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
// declare uploadedDocument as record
Record uploadedRecordHidden = getRestAPIFactory().getFilesAPI(testUser).declareAsRecord(uploadedDocHidden.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
recordsAPI.hideRecord(testUser.getUsername(),testUser.getPassword(),uploadedRecordHidden.getId());
// Upload document in a folder in a collaboration site
FileModel uploadedDocWithoutHidden = dataContent.usingSite(testSite)
.usingUser(testUser)
.usingResource(testFolder)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
Record uploadedRecordWithoutHidden = getRestAPIFactory().getFilesAPI(testUser).declareAsRecord(uploadedDocWithoutHidden.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
assertTrue(isRecordChildOfUnfiledContainer(uploadedRecordHidden.getId()), uploadedRecordHidden.getId() + " doesn't exist in Unfiled Records");
assertTrue(isRecordChildOfUnfiledContainer(uploadedRecordWithoutHidden.getId()), uploadedRecordWithoutHidden.getId() + " doesn't exist in Unfiled Records");
}
@AfterClass(alwaysRun = true)
public void deletePreConditions() {
STEP("Delete the records created in the test");
getRestAPIFactory()
.getUnfiledContainersAPI(RmAdminUser)
.getUnfiledContainerChildren(UNFILED_RECORDS_CONTAINER_ALIAS)
.getEntries()
.stream()
.forEach(x -> getRestAPIFactory()
.getRecordsAPI()
.deleteRecord(x.getEntry().getId()));
}
private boolean isRecordChildOfUnfiledContainer(String recordId) {
return getRestAPIFactory()
.getUnfiledContainersAPI(RmAdminUser)
.getUnfiledContainerChildren(UNFILED_RECORDS_CONTAINER_ALIAS)
.getEntries()
.stream()
.anyMatch(c -> c.getEntry().getId().equals(recordId));
}
}


@@ -0,0 +1,226 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import org.alfresco.dataprep.CMISUtil;
import org.alfresco.rest.core.v0.BaseAPI;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.rm.community.requests.gscore.api.RecordFolderAPI;
import org.alfresco.rest.rm.community.requests.gscore.api.UnfiledRecordFolderAPI;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.constants.UserRole;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.apache.chemistry.opencmis.client.api.CmisObject;
import org.apache.http.HttpResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
import static org.alfresco.rest.core.v0.APIUtils.convertHTTPResponseToJSON;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.UNFILED_RECORDS_CONTAINER_ALIAS;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentType.UNFILED_RECORD_FOLDER_TYPE;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.rest.rm.community.utils.CoreUtil.createBodyForMoveCopy;
import static org.alfresco.rest.rm.community.utils.CoreUtil.toContentModel;
import static org.alfresco.utility.data.RandomData.getRandomAlphanumeric;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.CREATED;
public class FileUnfiledRecordsTests extends BaseRMRestTest {
private final String TEST_PREFIX = generateTestPrefix(FileUnfiledRecordsTests.class);
private final String RM_ADMIN = TEST_PREFIX + "rm_admin";
public static final String NODE_REF_WORKSPACE_SPACES_STORE = "workspace://SpacesStore/";
private UserModel testUser;
private SiteModel testSite;
private String unfiledRecordFolderId;
private UserModel RmAdminUser;
private RecordCategory rootCategory;
private RecordCategoryChild recordFolder;
private final String recordName = "RM-2790 record";
private final String recordTitle = recordName + " title";
private final String recordDescription = recordName + " description";
/**
* data prep services
*/
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private RecordsAPI recordsAPI;
@BeforeClass(alwaysRun = true)
public void preConditions() {
STEP("Create RM Site");
createRMSiteIfNotExists();
STEP("Create RM Admin user");
rmRolesAndActionsAPI.createUserAndAssignToRole(getAdminUser().getUsername(), getAdminUser().getPassword(), RM_ADMIN,
getAdminUser().getPassword(),
"Administrator");
RmAdminUser = new UserModel(RM_ADMIN, getAdminUser().getPassword());
STEP("Create collab_user user");
testUser = getDataUser().createRandomTestUser();
testSite = dataSite.usingAdmin().createPublicRandomSite();
// invite collab_user to Collaboration site with Contributor role
getDataUser().addUserToSite(testUser, testSite, UserRole.SiteContributor);
}
@Test
@AlfrescoTest(jira = "RM-2790")
public void fileUnfiledRecords() throws Exception {
STEP("Upload the document to test site and then make it reacord");
// Upload document in a folder in a collaboration site
FileModel uploadedDocbyCollabUser = dataContent.usingSite(testSite)
.usingUser(testUser)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
// declare uploadedDocument as record
Record uploadedDocRecordbyCollabUser = getRestAPIFactory().getFilesAPI(testUser)
.declareAsRecord(uploadedDocbyCollabUser.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
STEP("Create root level category");
rootCategory = createRootCategory(RmAdminUser, getRandomName("Category"));
STEP("Create the record folder inside the rootCategory");
recordFolder = createFolder(RmAdminUser, rootCategory.getId(), getRandomName("Folder"));
STEP("Create a non-electronic record by completing some of the fields");
Map<Enum<?>, String> non_electronic_records_properties = new HashMap<>();
non_electronic_records_properties.put(BaseAPI.RMProperty.TITLE, recordTitle);
non_electronic_records_properties.put(BaseAPI.RMProperty.DESCRIPTION, recordDescription);
non_electronic_records_properties.put(BaseAPI.RMProperty.NAME, recordName);
non_electronic_records_properties.put(BaseAPI.RMProperty.PHYSICAL_SIZE, "");
non_electronic_records_properties.put(BaseAPI.RMProperty.NUMBER_OF_COPIES, "");
non_electronic_records_properties.put(BaseAPI.RMProperty.SHELF, "");
non_electronic_records_properties.put(BaseAPI.RMProperty.STORAGE_LOCATION, "");
non_electronic_records_properties.put(BaseAPI.RMProperty.BOX, "");
non_electronic_records_properties.put(BaseAPI.RMProperty.FILE, "");
HttpResponse nonElectronicRecordHttpResponse = recordsAPI.createNonElectronicRecord(getAdminUser().getUsername(),
getAdminUser().getPassword(), non_electronic_records_properties, rootCategory.getName(), recordFolder.getName());
String nonElectronicRecordId = getNodeRef(nonElectronicRecordHttpResponse);
STEP("Check the non-electronic record has been created");
assertStatusCode(CREATED);
STEP("Create a electronic record by completing some of the fields");
Map<BaseAPI.RMProperty, String> electronic_records_properties = new HashMap<>();
electronic_records_properties.put(BaseAPI.RMProperty.DESCRIPTION, recordDescription);
electronic_records_properties.put(BaseAPI.RMProperty.NAME, recordName);
recordsAPI.uploadElectronicRecord(RmAdminUser.getUsername(),
RmAdminUser.getPassword(), electronic_records_properties, recordFolder.getName(), CMISUtil.DocumentType.TEXT_PLAIN);
CmisObject electronicRecord = recordsAPI.getRecord(RmAdminUser.getUsername(),
RmAdminUser.getPassword(),recordFolder.getName(), electronic_records_properties.get(BaseAPI.RMProperty.NAME));
STEP("Check the electronic record has been created");
assertStatusCode(CREATED);
STEP("Create a root folder under FilePlan - Unfiled");
String unFiledFolder = createUnFileFolder();
STEP("Move all the Unfiled Records to unFiledFolder");
RestNodeModel uploadDocRestNodeModel = getRestAPIFactory()
.getNodeAPI(toContentModel(uploadedDocRecordbyCollabUser.getId()))
.move(createBodyForMoveCopy(unFiledFolder));
RestNodeModel nonElectronicDocRestNodeModel = getRestAPIFactory()
.getNodeAPI(toContentModel(nonElectronicRecordId))
.move(createBodyForMoveCopy(unFiledFolder));
RestNodeModel electronicDocRestNodeModel = getRestAPIFactory()
.getNodeAPI(toContentModel(electronicRecord.getId()))
.move(createBodyForMoveCopy(unFiledFolder));
STEP("Move all the Record present in the unFiledFolder to Folder inside Root Category");
getRestAPIFactory()
.getNodeAPI(toContentModel(uploadDocRestNodeModel.getId()))
.move(createBodyForMoveCopy(recordFolder.getId()));
getRestAPIFactory()
.getNodeAPI(toContentModel(nonElectronicDocRestNodeModel.getId()))
.move(createBodyForMoveCopy(recordFolder.getId()));
getRestAPIFactory()
.getNodeAPI(toContentModel(electronicDocRestNodeModel.getId()))
.move(createBodyForMoveCopy(recordFolder.getId()));
getRestAPIFactory().getRecordsAPI().deleteRecord(uploadDocRestNodeModel.getId());
getRestAPIFactory().getRecordsAPI().deleteRecord(nonElectronicDocRestNodeModel.getId());
getRestAPIFactory().getRecordsAPI().deleteRecord(electronicDocRestNodeModel.getId());
UnfiledRecordFolderAPI unfiledRecordFoldersAPI = getRestAPIFactory().getUnfiledRecordFoldersAPI();
unfiledRecordFoldersAPI.deleteUnfiledRecordFolder(unFiledFolder);
RecordFolderAPI recordFolderAPI = getRestAPIFactory().getRecordFolderAPI();
String recordFolderId = recordFolder.getId();
recordFolderAPI.deleteRecordFolder(recordFolderId);
}
@AfterClass (alwaysRun = true)
public void deletePreConditions()
{
STEP("Delete the created rootCategory along with corresponding record folders/records present in it");
getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory.getId());
}
private String createUnFileFolder() {
String categoryName = "RM-2790 record Category name " + getRandomAlphanumeric();
unfiledRecordFolderId = createUnfiledContainerChild(UNFILED_RECORDS_CONTAINER_ALIAS,
categoryName + getRandomAlphanumeric(), UNFILED_RECORD_FOLDER_TYPE).getId();
return unfiledRecordFolderId;
}
private String getNodeRef(HttpResponse httpResponse) {
return convertHTTPResponseToJSON(httpResponse).getString("persistedObject")
.replace(NODE_REF_WORKSPACE_SPACES_STORE, "");
}
}
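The getNodeRef helper above relies on the v0 create response carrying the new node reference in a persistedObject field, which is then stripped to the bare node id so the record can be moved with the v1 nodes API. A small self-contained sketch of that extraction, under the same assumption (the response body is illustrative):
import org.json.JSONObject;

public class PersistedObjectSketch
{
    public static void main(String[] args)
    {
        // Illustrative v0-style response body; only the persistedObject field is relevant here.
        String body = "{\"persistedObject\":\"workspace://SpacesStore/0d3b26ff-c4c1-4680-8622-8a0b5f5f1d65\"}";
        String nodeId = new JSONObject(body).getString("persistedObject")
                .replace("workspace://SpacesStore/", "");
        System.out.println(nodeId); // bare node id, usable with the v1 node move calls shown above
    }
}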


@@ -0,0 +1,179 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import org.alfresco.dataprep.CMISUtil;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.utility.Utility;
import org.alfresco.utility.constants.UserRole;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.junit.Assert.assertNotNull;
import static org.springframework.http.HttpStatus.CREATED;
import static org.springframework.test.util.AssertionErrors.assertTrue;
import static org.testng.Assert.fail;
public class InplaceRecordSearchTests extends BaseRMRestTest {
private UserModel siteCollaborator, siteConsumer, nonSiteMember;
private SiteModel privateSite;
private Record uploadedDocRecordbyCollabUser;
private FileModel uploadedDocbyCollabUser;
@Autowired
private RecordsAPI recordsAPI;
@BeforeClass(alwaysRun = true)
public void preConditions() {
STEP("Create RM Site");
createRMSiteIfNotExists();
// And a private collaboration site
privateSite = dataSite.usingAdmin().createPrivateRandomSite();
// And a site collaborator
siteCollaborator = getDataUser().createRandomTestUser();
getDataUser().addUserToSite(siteCollaborator, privateSite, UserRole.SiteCollaborator);
// And a site consumer
siteConsumer = getDataUser().createRandomTestUser();
getDataUser().addUserToSite(siteConsumer, privateSite, UserRole.SiteConsumer);
nonSiteMember = getDataUser().createRandomTestUser();
}
/**
* Given a RM site
* And a private collaboration site
* And a site collaborator
* And a site consumer
* And a user who is not a member of the site
* And a document that isn't a record
* When the collaborator declares it as a record
* Then the collaborator can browse to the record in the document library
* And can find the record using live search
* And can find the record using advanced search
* And the consumer can browse to the record in the document library
* And can find the record using live search
* And can find the record using advanced search
* And the user who is not a member of the site can't find the record using live search
* And can't find the record using advanced search
*/
@Test
public void searchForInplaceRecord() {
// And a document that isn't a record
uploadedDocbyCollabUser = dataContent.usingSite(privateSite)
.usingUser(siteCollaborator)
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
assertNotNull(uploadedDocbyCollabUser.getNodeRef());
// declare uploadedDocument as record
uploadedDocRecordbyCollabUser = getRestAPIFactory().getFilesAPI(siteCollaborator)
.declareAsRecord(uploadedDocbyCollabUser.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
assertNotNull(uploadedDocRecordbyCollabUser.getId());
STEP("Allow the Document to be index for it to be available");
try
{
Utility.sleep(1000, 40000, () ->
{
JSONObject siteConsumerSearchJson = getSearchApi().liveSearchForDocuments(siteConsumer.getUsername(),
siteConsumer.getPassword(),
uploadedDocbyCollabUser.getName());
assertTrue("Site Consumer not able to find the document.",siteConsumerSearchJson.getJSONArray("items").length() != 0);
});
}
catch (InterruptedException e)
{
fail("InterruptedException received while waiting for results.");
}
try
{
Utility.sleep(1000, 40000, () ->
{
JSONObject siteCollaboratorSearchJson = getSearchApi().liveSearchForDocuments(siteCollaborator.getUsername(),
siteCollaborator.getPassword(),
uploadedDocbyCollabUser.getName());
assertTrue("Site Collaborator not able to find the document.",siteCollaboratorSearchJson.getJSONArray("items").length() != 0);
});
}
catch (InterruptedException e)
{
fail("InterruptedException received while waiting for results.");
}
JSONObject nonSiteMemberSearchJson = getSearchApi().liveSearchForDocuments(nonSiteMember.getUsername(),
nonSiteMember.getPassword(),
uploadedDocbyCollabUser.getName());
assertTrue("Non Site Member is able to access restricted document.",nonSiteMemberSearchJson.getJSONArray("items").isEmpty());
}
/**
* Given @see {@link #searchForInplaceRecord()}
* When the collaboration user hides the record in the collaboration site
* Then the collaborator can not browse to the record in the document library
* And can't find the record using live search
* And can't find the record using advanced search
*/
@Test(dependsOnMethods = {"searchForInplaceRecord"})
public void usersCantFindRecordAfterHide() {
recordsAPI.hideRecord(siteCollaborator.getUsername(),siteCollaborator.getPassword(),uploadedDocRecordbyCollabUser.getId());
JSONObject siteCollaboratorSearchJson = getSearchApi().liveSearchForDocuments(siteCollaborator.getUsername(),
siteCollaborator.getPassword(),
uploadedDocbyCollabUser.getName());
assertTrue("Site Collaborator able to find the document after it is hidden.",siteCollaboratorSearchJson.getJSONArray("items").isEmpty());
}
@AfterClass
public void tearDown() {
// clean-up collab site
dataSite.usingAdmin().deleteSite(privateSite);
// clean-up users siteCollaborator, siteConsumer, nonSiteMember
dataUser.deleteUser(siteCollaborator);
dataUser.deleteUser(siteConsumer);
dataUser.deleteUser(nonSiteMember);
}
}
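The search tests above poll with Utility.sleep(delayMillis, timeoutMillis, assertion), which they appear to use as a retry helper that re-runs the assertion until it passes or the timeout expires. A rough, framework-free sketch of that retry-until-assert idea (the Alfresco utility itself is not reproduced here; the names below are illustrative):
import java.util.concurrent.TimeUnit;

public final class RetrySketch
{
    /**
     * Re-runs the assertion until it stops throwing or the timeout elapses.
     * Illustrative only; mirrors how the tests above use their retry helper.
     */
    static void retryUntil(long delayMillis, long timeoutMillis, Runnable assertion) throws InterruptedException
    {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        AssertionError last = null;
        while (System.currentTimeMillis() < deadline)
        {
            try
            {
                assertion.run();
                return;                     // assertion passed, stop polling
            }
            catch (AssertionError e)
            {
                last = e;                   // not satisfied yet (e.g. not indexed), wait and retry
                TimeUnit.MILLISECONDS.sleep(delayMillis);
            }
        }
        throw last != null ? last : new AssertionError("condition never became true before the timeout");
    }
}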


@@ -0,0 +1,539 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import org.alfresco.dataprep.CMISUtil;
import org.alfresco.rest.core.v0.BaseAPI;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.RecordContent;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.rm.community.model.unfiledcontainer.UnfiledContainerChild;
import org.alfresco.rest.rm.community.requests.gscore.api.UnfiledContainerAPI;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.rest.v0.SearchAPI;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.UserModel;
import org.apache.http.HttpResponse;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.json.JSONArray;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.HashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.alfresco.rest.rm.community.base.TestData.ELECTRONIC_RECORD_NAME;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.UNFILED_RECORDS_CONTAINER_ALIAS;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentType.CONTENT_TYPE;
import static org.alfresco.rest.rm.community.model.user.UserPermissions.*;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createTempFile;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import static org.testng.Assert.assertTrue;
/**
* Tests the search of records in Records Search page
* @author Kavit Shah
*/
public class SearchRecordsTests extends BaseRMRestTest {
private Optional<UserModel> nonRmSiteUser, rm_user_search, rm_manager, rm_admin_search;
/** The Records Manager role name used when assigning roles to test users. */
public static final String ROLE_RM_MANAGER = "RecordsManager";
private final String TEST_PREFIX = generateTestPrefix(SearchRecordsTests.class);
private final String CATEGORY_ALL = TEST_PREFIX + "everybody's category";
private final String FOLDER_SEARCH = TEST_PREFIX + "basic search folder";
private final String FOLDER_ADMIN_ONLY = TEST_PREFIX + "rm admin category";
private final String CATEGORY_ADMIN_ONLY = TEST_PREFIX + "rm admin category";
public static final String ROLE_RM_USER = "User";
public static final String ADMIN = "Administrator";
private final String ELECTRONIC_RECORD = TEST_PREFIX + " Electronic";
private final String UNFILED_ELECTRONIC_RECORD = TEST_PREFIX + " Unfiled Electronic";
private final String NON_ELECTRONIC_RECORD = TEST_PREFIX + " Non-Electronic";
private final String ADMIN_ELECTRONIC_RECORD = TEST_PREFIX + " admin Electronic";
public static final String TITLE = "Title";
public static final String DESCRIPTION = "Description";
public static final String TEST_CONTENT = "This is some test content";
private RecordCategory categoryAll, category_Admin_Only;
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private SearchAPI searchAPI;
@Autowired
private RecordsAPI recordsAPI;
@BeforeClass (alwaysRun = true)
public void createRecordsForSearch()
{
createRMSiteIfNotExists();
nonRmSiteUser = Optional.ofNullable(getDataUser().createRandomTestUser());
// create RM manager and RM user
createRMManager();
createRMUser();
createRMAdmin();
categoryAll = createCategoryIfDoesNotExist(CATEGORY_ALL);
createRecordFolderInCategory(FOLDER_SEARCH, categoryAll);
category_Admin_Only = createCategoryIfDoesNotExist(CATEGORY_ADMIN_ONLY);
createRecordFolderInCategory(FOLDER_ADMIN_ONLY,category_Admin_Only);
// upload records in folder in category and in Unfiled Records
uploadElectronicRecordInContainer(ELECTRONIC_RECORD, FOLDER_SEARCH);
createNonElectronicRecordInContainer(NON_ELECTRONIC_RECORD, CATEGORY_ALL, FOLDER_SEARCH);
uploadElectronicRecordInContainer(ADMIN_ELECTRONIC_RECORD, FOLDER_ADMIN_ONLY);
UnfiledContainerChild electronicRecord = UnfiledContainerChild.builder()
.name(UNFILED_ELECTRONIC_RECORD)
.nodeType(CONTENT_TYPE)
.content(RecordContent.builder().mimeType("text/plain").build())
.build();
getRecordsFromUnfiledRecordsContainer(electronicRecord);
}
/**
* Given I have created record category X which contains record folder Y which contains record Z
* And I have selected to display record category id in the search results
* When I issue a record search whose results will contain record X
* Then record X is displayed in the results
* And the record category X's ID is also displayed in search result meta-data for record X
*/
@Test(priority = 1)
public void searchResultsWithRecordCategoryIdentifier() {
AtomicBoolean electronicRecordFound = new AtomicBoolean(false);
AtomicReference<JSONArray> items = new AtomicReference<>();
AtomicBoolean recordCategoryIdentifier = new AtomicBoolean(false);
STEP("Open the record search page and search by the items created");
try {
Utility.sleep(1000, 40000, () -> {
JSONObject searchResult = (searchAPI
.rmSearch(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
"rm",
"keywords:" + TEST_PREFIX + "*",
"records/true,undeclared/true,vital/false,folders/false,categories/false,frozen/false,cutoff/false",
"rma:identifier/asc"));
items.set((JSONArray) searchResult.get("items"));
assertFalse("Site Consumer not able to find the document.", ((JSONArray)searchResult.get("items")).isEmpty());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
STEP("Check that the records from file plan have the record category identifier displayed");
List searchList = IntStream.range(0, items.get().length()).mapToObj(i-> items.get().get(i)).collect(Collectors.toList());
searchList.stream().forEach(x -> {
Map<String, String> reconstructedUtilMap = Arrays.stream(x.toString().split(","))
.map(s -> s.split(":"))
.collect(Collectors.toMap(s -> s[0], s -> s[1]));
if(reconstructedUtilMap.get("\"name\"").contains(TEST_PREFIX + " Electronic")) {
electronicRecordFound.set(true);
}
});
assertFalse("The File Name with the Prefix " + TEST_PREFIX + " as Electronic Record was not found.", !electronicRecordFound.get());
STEP("Change the search filter to return only record folders and record categories");
try {
Utility.sleep(1000, 40000, () -> {
JSONObject searchResult = (searchAPI
.rmSearch(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
"rm",
"keywords:" + TEST_PREFIX + "*",
"records/false,undeclared/true,vital/false,folders/true,categories/true,frozen/false,cutoff/false",
"rma:identifier/asc"));
items.set((JSONArray) searchResult.get("items"));
assertFalse("Site Consumer not able to find the document.", ((JSONArray)searchResult.get("items")).isEmpty());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
STEP("Check that the records folders and categories don't have a record category identifier displayed");
List recordFolderSearchList = IntStream.range(0, items.get().length()).mapToObj(i-> items.get().get(i)).collect(Collectors.toList());
recordFolderSearchList.stream().forEach(x -> {
Map<String, String> reconstructedUtilMap = Arrays.stream(x.toString().split(","))
.map(s -> s.split(":"))
.collect(Collectors.toMap(s -> s[0], s -> s[1]));
if(null != reconstructedUtilMap.get("\"rma_recordCategoryIdentifier\"")) {
recordCategoryIdentifier.set(true);
}
});
assertFalse("Record Category Identifier displayed for " + TEST_PREFIX + ".", recordCategoryIdentifier.get());
}
/**
* A user who is not a member of the RM site cannot see any of the records
* <p>
* Given that I am not an RM site user
* I cannot find any of the records in the File Plan or in Unfiled Records
*/
@Test (priority = 2)
public void nonRMUserSearchResults() {
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(nonRmSiteUser.get().getUsername(),
nonRmSiteUser.get().getPassword(),
ELECTRONIC_RECORD));
assertFalse("The file with search term " + ELECTRONIC_RECORD + " was found using RM Not Site User "+ nonRmSiteUser.get().getUsername(),getResult(ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(nonRmSiteUser.get().getUsername(),
nonRmSiteUser.get().getPassword(),
UNFILED_ELECTRONIC_RECORD));
assertFalse("The file with search term " + UNFILED_ELECTRONIC_RECORD + " was not found using RM Not Site User "+ nonRmSiteUser.get().getUsername(),getResult(UNFILED_ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(nonRmSiteUser.get().getUsername(),
nonRmSiteUser.get().getPassword(),
NON_ELECTRONIC_RECORD));
assertFalse("The file with search term " + NON_ELECTRONIC_RECORD + " was not found using RM Not Site User "+ nonRmSiteUser.get().getUsername(),getResult(NON_ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = searchAPI
.searchForDocumentsAsUser(nonRmSiteUser.get().getUsername(),
nonRmSiteUser.get().getPassword(),
ADMIN_ELECTRONIC_RECORD);
assertFalse("The file with search term " + ADMIN_ELECTRONIC_RECORD + " was not found using RM Not Site User "+ nonRmSiteUser.get().getUsername(),getResult(ADMIN_ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
}
/**
* A user with the RM User role can see the records they have permission over and all records in Unfiled Records
* <p>
* Given that I am an RM User
* I can see only the records in the File Plan that I have permission over, and all records in Unfiled Records
*/
@Test (priority = 3)
public void rmUserSearchResults() {
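// Give the RM user read and file permission on the category that holds the test records so that they become visible in search.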
getRestAPIFactory().getRMUserAPI().addUserPermission(categoryAll.getId(), rm_user_search.get(), PERMISSION_READ_RECORDS);
getRestAPIFactory().getRMUserAPI().addUserPermission(categoryAll.getId(), rm_user_search.get(), PERMISSION_FILE_RECORDS);
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_user_search.get().getUsername(),
rm_user_search.get().getPassword(),
ELECTRONIC_RECORD));
assertTrue(getResult(ELECTRONIC_RECORD, stringList), "The file with search term " + ELECTRONIC_RECORD + " was not found using RM User " + rm_user_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_user_search.get().getUsername(),
rm_user_search.get().getPassword(),
UNFILED_ELECTRONIC_RECORD));
assertTrue(getResult(UNFILED_ELECTRONIC_RECORD, stringList), "The file with search term " + UNFILED_ELECTRONIC_RECORD + " was not found using RM User " + rm_user_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_user_search.get().getUsername(),
rm_user_search.get().getPassword(),
NON_ELECTRONIC_RECORD));
assertTrue(getResult(NON_ELECTRONIC_RECORD, stringList), "The file with search term " + NON_ELECTRONIC_RECORD + " was not found using RM User " + rm_user_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = searchAPI
.searchForDocumentsAsUser(rm_user_search.get().getUsername(),
rm_user_search.get().getPassword(),
ADMIN_ELECTRONIC_RECORD);
assertFalse("The file with search term" + ADMIN_ELECTRONIC_RECORD + " was not found using RM User "+ rm_user_search.get().getUsername(),getResult(ADMIN_ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
}
/**
* A user with the RM Manager role can see the records they have permission over and all records in Unfiled Records
* <p>
* Given that I am an RM Manager
* I can see only the records in the File Plan that I have permission over, and all records in Unfiled Records
*/
@Test (priority = 4)
public void rmManagerSearchResults() {
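// Give the RM manager read permission on the category that holds the test records.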
getRestAPIFactory().getRMUserAPI().addUserPermission(categoryAll.getId(), rm_manager.get(), PERMISSION_READ_RECORDS);
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_manager.get().getUsername(),
rm_manager.get().getPassword(),
ELECTRONIC_RECORD));
assertTrue(getResult(ELECTRONIC_RECORD,stringList),"The file with search term " + ELECTRONIC_RECORD + " was not found using RM manager User "+ rm_manager.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_manager.get().getUsername(),
rm_manager.get().getPassword(),
UNFILED_ELECTRONIC_RECORD));
assertTrue(getResult(UNFILED_ELECTRONIC_RECORD,stringList),"The file with search term " + UNFILED_ELECTRONIC_RECORD + " was not found using RM manager User "+ rm_manager.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_manager.get().getUsername(),
rm_manager.get().getPassword(),
NON_ELECTRONIC_RECORD));
assertTrue(getResult(NON_ELECTRONIC_RECORD,stringList),"The file with search term " + NON_ELECTRONIC_RECORD + " was not found using RM manager User "+ rm_manager.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = searchAPI
.searchForDocumentsAsUser(rm_manager.get().getUsername(),
rm_manager.get().getPassword(),
ADMIN_ELECTRONIC_RECORD);
assertFalse("The file with search term" + ADMIN_ELECTRONIC_RECORD + " was found using RM manager User "+ rm_manager.get().getUsername(),getResult(ADMIN_ELECTRONIC_RECORD,stringList));
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
}
/**
* A user with the RM Administrator role can see all the records
*
* Given that I am an RM Administrator
* I can see all the records in the File Plan and in Unfiled Records through RM Search and Advanced Search
*/
@Test(priority = 5)
public void rmAdminSearchResults() {
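// No extra permissions are granted here; the RM Administrator role is expected to see all the test records.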
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_admin_search.get().getUsername(),
rm_admin_search.get().getPassword(),
ELECTRONIC_RECORD));
assertTrue(getResult(ELECTRONIC_RECORD,stringList),"The file with search term " + ELECTRONIC_RECORD + " was not found using RM Admin User "+ rm_admin_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_admin_search.get().getUsername(),
rm_admin_search.get().getPassword(),
UNFILED_ELECTRONIC_RECORD));
assertTrue(getResult(UNFILED_ELECTRONIC_RECORD,stringList),"The file with search term " + UNFILED_ELECTRONIC_RECORD + " was not found using RM Admin User "+ rm_admin_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
try {
Utility.sleep(1000, 40000, () -> {
List<String> stringList = (searchAPI
.searchForDocumentsAsUser(rm_admin_search.get().getUsername(),
rm_admin_search.get().getPassword(),
NON_ELECTRONIC_RECORD));
assertTrue(getResult(NON_ELECTRONIC_RECORD,stringList),"The file with search term " + NON_ELECTRONIC_RECORD + " was not found using RM Admin User "+ rm_admin_search.get().getUsername());
});
}
catch (InterruptedException e) {
fail("InterruptedException received while waiting for results.");
}
}
private void createRMManager() {
// create RM manager
rm_manager = Optional.ofNullable(getDataUser().createRandomTestUser());
rmRolesAndActionsAPI.assignRoleToUser(
getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
rm_manager.get().getUsername(),
ROLE_RM_MANAGER
);
}
private void createRMUser() {
// create RM user
rm_user_search = Optional.ofNullable(getDataUser().createRandomTestUser());
rmRolesAndActionsAPI.assignRoleToUser(
getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
rm_user_search.get().getUsername(),
ROLE_RM_USER
);
}
private void createRMAdmin() {
// create RM Admin
rm_admin_search = Optional.ofNullable(getDataUser().createRandomTestUser());
rmRolesAndActionsAPI.assignRoleToUser(
getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
rm_admin_search.get().getUsername(),
ADMIN
);
}
private RecordCategory createCategoryIfDoesNotExist(String categoryName) {
return createRootCategory(getDataUser().usingAdmin().getAdminUser(), categoryName);
}
private RecordCategoryChild createRecordFolderInCategory(String folderName, RecordCategory recordCategory) {
return createFolder(getDataUser().usingAdmin().getAdminUser(), recordCategory.getId(), folderName);
}
private void uploadElectronicRecordInContainer(String recordName, String folderName) {
recordsAPI.uploadElectronicRecord(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(),
getDefaultElectronicRecordProperties(recordName), folderName, CMISUtil.DocumentType.TEXT_PLAIN);
}
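// Creates a non-electronic record with the default title and description in the given category and folder, acting as the admin user.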
protected HttpResponse createNonElectronicRecordInContainer(String name, String categoryName, String folderName) {
Map<BaseAPI.RMProperty, String> defaultProperties = new HashMap<>();
defaultProperties.put(BaseAPI.RMProperty.NAME, name);
defaultProperties.put(BaseAPI.RMProperty.TITLE, TITLE);
defaultProperties.put(BaseAPI.RMProperty.DESCRIPTION, DESCRIPTION);
return recordsAPI.createNonElectronicRecord(getDataUser().usingAdmin().getAdminUser().getUsername(),
getDataUser().usingAdmin().getAdminUser().getPassword(), defaultProperties, categoryName, folderName);
}
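// Default metadata (name, title, description and content) used when uploading an electronic record.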
public Map<BaseAPI.RMProperty, String> getDefaultElectronicRecordProperties(String recordName) {
Map<BaseAPI.RMProperty, String> defaultProperties = new HashMap<>();
defaultProperties.put(BaseAPI.RMProperty.NAME, recordName);
defaultProperties.put(BaseAPI.RMProperty.TITLE, TITLE);
defaultProperties.put(BaseAPI.RMProperty.DESCRIPTION, DESCRIPTION);
defaultProperties.put(BaseAPI.RMProperty.CONTENT, TEST_CONTENT);
return defaultProperties;
}
@AfterClass(alwaysRun = true)
public void standardSearchTeardown() {
// delete categories
deleteRecordCategory(categoryAll.getId());
deleteRecordCategory(category_Admin_Only.getId());
// delete users
nonRmSiteUser.ifPresent(user -> getDataUser().deleteUser(user));
rm_user_search.ifPresent(user -> getDataUser().deleteUser(user));
rm_manager.ifPresent(user -> getDataUser().deleteUser(user));
rm_admin_search.ifPresent(user -> getDataUser().deleteUser(user));
}
private boolean getResult(String partialRecordName, List<String> searchResults) {
if(null != searchResults) {
for (String searchResult : searchResults) {
if (searchResult.startsWith(partialRecordName)) {
return true;
}
}
}
return false;
}
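// Uploads the given electronic record into the Unfiled Records container and returns the created record's id in a data-provider style array.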
private Object[][] getRecordsFromUnfiledRecordsContainer(UnfiledContainerChild electronicRecord)
{
UnfiledContainerAPI unfiledContainersAPI = getRestAPIFactory().getUnfiledContainersAPI();
return new String[][] {
{ unfiledContainersAPI.uploadRecord(electronicRecord, UNFILED_RECORDS_CONTAINER_ALIAS,
createTempFile(ELECTRONIC_RECORD_NAME, ELECTRONIC_RECORD_NAME)).getId()}
};
}
}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>

View File

@@ -1,4 +1,3 @@
TRANSFORMERS_TAG=2.5.7
SOLR6_TAG=2.0.3
POSTGRES_TAG=13.3
ACTIVEMQ_TAG=5.16.1

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<properties>
@@ -361,6 +361,12 @@
<integrationTestSuite>**/AllTestSuitePt3.class</integrationTestSuite>
</properties>
</profile>
<profile>
<id>agsAllTestSuitePt4</id>
<properties>
<integrationTestSuite>**/AllTestSuitePt4.class</integrationTestSuite>
</properties>
</profile>
<profile>
<id>use-mysql</id>

View File

@@ -43,19 +43,16 @@ import org.junit.runner.RunWith;
@ClassnameFilters({
// The following packages are run by Pt1. IF YOU CHANGE THIS LIST ALSO CHANGE IT IN AllTestSuitePt3.
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.action\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.capabilities\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.jscript\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.security\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.service\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.webscript\\..*Test",
// There appears to be some common setup taking place in the first 2 packages, which is why all legacy tests are
// together even though they take a little longer to run that way.
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.jscript\\..*Test",
// Exclude all UnitTests
"!.*UnitTest",
// Put the test classes you want to exclude here
"!.*DispositionServiceImplTest",
"!.*FilePlanPermissionServiceImplTest",
// above 2 tests from service package require setup from FileReportActionTest so they've been moved to part 4
"!.*DataLoadSystemTest",
"!.*RM2072Test",
"!.*RM2190Test",

View File

@@ -0,0 +1,78 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.test;
import org.junit.extensions.cpsuite.ClasspathSuite;
import org.junit.extensions.cpsuite.ClasspathSuite.ClassnameFilters;
import org.junit.extensions.cpsuite.ClasspathSuite.SuiteTypes;
import org.junit.extensions.cpsuite.SuiteType;
import org.junit.runner.RunWith;
/**
* Convenience test suite that runs all the tests. THIS HAS BEEN SPLIT INTO PARTS SO THAT THE BUILD TIME IS REDUCED.
*
* @author Marcin Strankowski
* @since 2.1
*/
@RunWith(ClasspathSuite.class)
@SuiteTypes({SuiteType.TEST_CLASSES, SuiteType.RUN_WITH_CLASSES, SuiteType.JUNIT38_TEST_CLASSES})
@ClassnameFilters({
// The following packages are run by Pt1. IF YOU CHANGE THIS LIST ALSO CHANGE IT IN AllTestSuitePt3.
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.action\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.capabilities\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.security\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.webscript\\..*Test",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.service\\.*DispositionServiceImplTest",
"org\\.alfresco\\.module\\.org_alfresco_module_rm\\.test\\.legacy\\.service\\.*FilePlanPermissionServiceImplTest",
// 2 tests from the service package are here; they seem to require FileReportActionTest to be run beforehand
// and it would take too much time to run them in the 1st test suite
// Exclude all UnitTests
"!.*UnitTest",
// Put the test classes you want to exclude here
"!.*DataLoadSystemTest",
"!.*RM2072Test",
"!.*RM2190Test",
"!.*RM981SystemTest",
"!.*RM3993Test",
"!.*RM4163Test",
"!.*RecordsManagementEventServiceImplTest",
"!.*RmRestApiTest",
"!.*NotificationServiceHelperSystemTest",
"!.*RetryingTransactionHelperBaseTest",
"!.*RMCaveatConfigServiceImplTest",
// This test is running successfully locally but not on bamboo (if executed as a single test).
// The problem can be reproduced if the whole test suite is run locally as well.
// Tests should not be dependent on other test classes and should run in any order without any problems.
"!.*EmailMapScriptTest"
})
public class AllTestSuitePt4
{
}

View File

@@ -4,7 +4,7 @@
# Version label
version.major=7
version.minor=0
version.minor=3
version.revision=0
version.label=
@@ -15,4 +15,4 @@ version.edition=Community
version.scmrevision=@scm-path@-r@scm-revision@
# Build number
version.build=r@scm-revision@-b@build-number@
version.build=r@scm-revision@-b@build-number@

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<dependencies>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<properties>

View File

@@ -78,6 +78,10 @@ public class ObjectTypeIdLuceneBuilder extends BaseLuceneBuilder
String field = getLuceneFieldName();
String stringValue = getValueAsString(value);
TypeDefinitionWrapper type = cmisDictionaryService.findType(stringValue);
if (type == null)
{
throw new CmisInvalidArgumentException("Unknown type: " + stringValue);
}
return lqpa
.getFieldQuery(field, type.getAlfrescoClass().toString(), AnalysisMode.IDENTIFIER, luceneFunction);
}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
</project>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>

View File

@@ -1,4 +1,3 @@
TRANSFORMERS_TAG=2.5.7
SOLR6_TAG=2.0.3
POSTGRES_TAG=13.3
ACTIVEMQ_TAG=5.16.1

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<modules>

View File

@@ -27,6 +27,9 @@ fi
echo "Starting ACS stack in ${DOCKER_COMPOSE_PATH}"
export TRANSFORMERS_TAG=$(mvn help:evaluate -Dexpression=dependency.alfresco-transform-core.version -q -DforceStdout)
export TRANSFORM_ROUTER_TAG=$(mvn help:evaluate -Dexpression=dependency.alfresco-transform-service.version -q -DforceStdout)
# .env files are picked up from project directory correctly on docker-compose 1.23.0+
docker-compose --file "${DOCKER_COMPOSE_PATH}" --project-directory $(dirname "${DOCKER_COMPOSE_PATH}") up -d

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<developers>

View File

@@ -0,0 +1,76 @@
package org.alfresco.cmis.search;
import java.lang.reflect.Method;
import org.alfresco.cmis.CmisProperties;
import org.alfresco.utility.Utility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
@ContextConfiguration("classpath:alfresco-cmis-context.xml")
@Component
@Scope(value = "prototype")
public abstract class AbstractCmisE2ETest extends AbstractE2EFunctionalTest
{
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractCmisE2ETest.class);
@Autowired
protected CmisProperties cmisProperties;
public String documentContent = "CMIS document content";
@BeforeMethod(alwaysRun = true)
public void showStartTestInfo(Method method)
{
LOGGER.info(String.format("*** STARTING Test: [%s] ***", method.getName()));
}
@AfterMethod(alwaysRun = true)
public void showEndTestInfo(Method method)
{
LOGGER.info(String.format("*** ENDING Test: [%s] ***", method.getName()));
}
public Integer getElasticWaitTimeInSeconds()
{
return cmisProperties.envProperty().getSolrWaitTimeInSeconds();
}
/**
* Repeat the Elastic query until the result count equals expectedCountResults
* @param query CMIS Query to be executed
* @param expectedCountResults Number of results expected
* @return true when the result count is equal to expectedCountResults
*/
protected boolean waitForIndexing(String query, long expectedCountResults)
{
for (int searchCount = 1; searchCount <= SEARCH_MAX_ATTEMPTS; searchCount++)
{
try
{
cmisApi.withQuery(query).assertResultsCount().equals(expectedCountResults);
return true;
}
catch (AssertionError ae)
{
LOGGER.info(String.format("WaitForIndexing in Progress: %s", ae));
}
Utility.waitToLoopTime(getElasticWaitTimeInSeconds(), "Wait For Indexing");
}
return false;
}
}

View File

@@ -0,0 +1,58 @@
package org.alfresco.cmis.search;
import org.alfresco.cmis.CmisWrapper;
import org.alfresco.dataprep.SiteService.Visibility;
import org.alfresco.utility.data.DataContent;
import org.alfresco.utility.data.DataSite;
import org.alfresco.utility.data.DataUser;
import org.alfresco.utility.data.RandomData;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.alfresco.utility.network.ServerHealth;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.annotations.BeforeClass;
@ContextConfiguration ("classpath:alfresco-cmis-context.xml")
public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringContextTests
{
/** The number of retries that a query will be tried before giving up. */
protected static final int SEARCH_MAX_ATTEMPTS = 20;
@Autowired
protected ServerHealth serverHealth;
@Autowired
protected DataSite dataSite;
@Autowired
protected DataContent dataContent;
@Autowired
protected CmisWrapper cmisApi;
@Autowired
protected DataUser dataUser;
protected UserModel testUser, adminUserModel;
protected SiteModel testSite;
protected static String unique_searchString;
@BeforeClass (alwaysRun = true)
public void setup()
{
serverHealth.assertServerIsOnline();
adminUserModel = dataUser.getAdminUser();
testUser = dataUser.createRandomTestUser("UserSearch");
testSite = new SiteModel(RandomData.getRandomName("SiteSearch"));
testSite.setVisibility(Visibility.PRIVATE);
testSite = dataSite.usingUser(testUser).createSite(testSite);
unique_searchString = testSite.getTitle().replace("SiteSearch", "Unique");
}
}

View File

@@ -0,0 +1,62 @@
package org.alfresco.cmis.search;
import org.alfresco.utility.Utility;
import org.alfresco.utility.data.provider.XMLDataConfig;
import org.alfresco.utility.data.provider.XMLTestDataProvider;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FileType;
import org.alfresco.utility.model.FolderModel;
import org.alfresco.utility.model.QueryModel;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class SearchInFolderTests extends AbstractCmisE2ETest
{
private FolderModel parentFolder, subFolder1, subFolder2, subFolder3;
private FileModel subFile1, subFile2, subFile3, subFile4, subFile5;
@BeforeClass(alwaysRun = true)
public void createTestData() throws Exception
{
// create input data
parentFolder = FolderModel.getRandomFolderModel();
subFolder1 = FolderModel.getRandomFolderModel();
subFolder2 = FolderModel.getRandomFolderModel();
subFolder3 = new FolderModel("subFolder");
subFile5 = new FileModel("fifthFile.txt",FileType.TEXT_PLAIN, "fifthFile content");
subFile1 = new FileModel("firstFile", FileType.MSEXCEL);
subFile2 = FileModel.getRandomFileModel(FileType.MSPOWERPOINT2007);
subFile3 = FileModel.getRandomFileModel(FileType.TEXT_PLAIN);
subFile4 = new FileModel("fourthFile", "fourthFileTitle", "fourthFileDescription", FileType.MSWORD2007);
cmisApi.authenticateUser(testUser).usingSite(testSite).createFolder(parentFolder)
.then().usingResource(parentFolder)
.createFile(subFile5).assertThat().contentIs("fifthFile content")
.createFolder(subFolder1)
.createFolder(subFolder2)
.createFolder(subFolder3)
.createFile(subFile1)
.createFile(subFile2)
.createFile(subFile3)
.createFile(subFile4);
// wait for index
Utility.waitToLoopTime(getElasticWaitTimeInSeconds());
}
@AfterClass(alwaysRun = true)
public void cleanupEnvironment()
{
dataContent.deleteSite(testSite);
}
@Test(dataProviderClass = XMLTestDataProvider.class, dataProvider = "getQueriesData")
@XMLDataConfig(file = "src/test/resources/search-in-folder.xml")
public void executeCMISQuery(QueryModel query)
{
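// Each query in search-in-folder.xml contains a %s placeholder that is replaced with the parent folder's nodeRef.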
String currentQuery = String.format(query.getValue(), parentFolder.getNodeRef());
cmisApi.authenticateUser(testUser);
Assert.assertTrue(waitForIndexing(currentQuery, query.getResults()), String.format("Result count not as expected for query: %s", currentQuery));
}
}

View File

@@ -7,7 +7,7 @@ alfresco.port=8082
admin.user=admin
admin.password=admin
solrWaitTimeInSeconds=30
solrWaitTimeInSeconds=60
# in containers we cannot access JMX directly, so we use the http://jolokia.org agent
# if this is disabled, direct JMX calls to the server are used instead

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--CMIS Queries: passing the search query as first param and results expected -->
<testData xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<queries>
<query value="SELECT cmis:name, cmis:parentId, cmis:path, cmis:allowedChildObjectTypeIds FROM cmis:folder where IN_FOLDER('%s') AND cmis:name = 'subFolder'" expectedResults="1" />
<query value="SELECT cmis:name, cmis:objectId, cmis:lastModifiedBy, cmis:creationDate, cmis:contentStreamFileName FROM cmis:document where IN_FOLDER('%s') AND cmis:name = 'fourthFile'" expectedResults="1" />
<query value="SELECT cmis:parentId FROM cmis:folder where IN_FOLDER('%s')" expectedResults="3" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s')" expectedResults="5" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') ORDER BY cmis:name ASC" expectedResults="5" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') ORDER BY cmis:name DESC" expectedResults="5" />
<query value="SELECT * FROM cmis:folder where IN_FOLDER('%s') ORDER BY cmis:lastModificationDate ASC" expectedResults="3" />
<query value="SELECT * FROM cmis:folder where IN_FOLDER('%s') ORDER BY cmis:lastModificationDate DESC" expectedResults="3" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') ORDER BY cmis:createdBy DESC" expectedResults="5" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') AND cmis:name IS NOT NULL" expectedResults="5" />
<query value="SELECT * FROM cmis:folder where IN_FOLDER('%s') AND cmis:name IS NOT NULL" expectedResults="3" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') AND cmis:name LIKE 'fourthFile'" expectedResults="1" />
<query value="SELECT * FROM cmis:folder where IN_FOLDER('%s') AND NOT(cmis:name NOT IN ('subFolder'))" expectedResults="1" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') AND cmis:name IN ('fourthFile', 'fifthFile.txt')" expectedResults="2" />
<query value="SELECT * FROM cmis:document where IN_FOLDER('%s') AND cmis:name NOT IN ('fourthFile', 'fifthFile.txt')" expectedResults="3" />
<query value="SELECT * FROM cmis:folder where IN_FOLDER('%s') AND cmis:name &lt;&gt; 'subFolder'" expectedResults="2" />
<query value="SELECT cmis:secondaryObjectTypeIds FROM cmis:folder where IN_FOLDER('%s') AND cmis:name = 'subFolder'" expectedResults="1" />
</queries>
</testData>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<developers>

View File

@@ -60,15 +60,15 @@ public class IntegrationFullTestsBulk2 extends IntegrationTest
ftpProtocol.authenticateUser(testUser1).usingSite(testSitePublic).createFolder(testFolder1)
.usingResource(testFolder1).createFile(wordFile)
.assertThat().contentIs("tasTesting");
STEP("2. Open document for edit using CMIS");
cmisAPI.authenticateUser(testUser1).usingResource(wordFile).checkOut();
FileModel wordFilePWC = cmisAPI.usingResource(wordFile).withCMISUtil().getPWCFileModel();
STEP("3. Try to edit document using Webdav while checked-out with CMIS - content should be updated");
webDavProtocol.authenticateUser(testUser1).usingResource(wordFilePWC).update("update")
.and().assertThat().contentIs("update");
STEP("4. Copy document to testFolder2 with ftp");
FileModel copiedWordFile = new FileModel(wordFile);
ftpProtocol.usingSite(testSitePublic).createFolder(testFolder2)
@@ -76,15 +76,15 @@ public class IntegrationFullTestsBulk2 extends IntegrationTest
copiedWordFile.setCmisLocation(ftpProtocol.getLastResourceWithoutPrefix());
ftpProtocol.usingResource(testFolder1).assertThat().hasFiles(wordFile)
.and().usingResource(testFolder2).assertThat().hasFiles(copiedWordFile);
STEP("5. Update document from folder2, check its content is updated with Webdav");
webDavProtocol.usingResource(copiedWordFile).update("Step5")
.and().assertThat().contentIs("Step5");
STEP("6. Update document with WebDAV");
webDavProtocol.authenticateUser(testUser1).usingResource(wordFile).update("WebDAVUpdate");
}
/**
* Scenario 84
* 1. Create folder1 with webdav
@@ -150,7 +150,7 @@ public class IntegrationFullTestsBulk2 extends IntegrationTest
* 13. Check file is deleted with WebDAV
*/
@Test(groups = { TestGroup.INTEGRATION, TestGroup.FULL })
@TestRail(section = { TestGroup.INTEGRATION, TestGroup.CONTENT }, executionType = ExecutionType.REGRESSION,
@TestRail(section = { TestGroup.INTEGRATION, TestGroup.CONTENT }, executionType = ExecutionType.REGRESSION,
description = "Update file with different user roles and protocols")
public void updateFileWithDifferentRolesUsingDifferentProtocols() throws Exception
{
@@ -276,41 +276,41 @@ public class IntegrationFullTestsBulk2 extends IntegrationTest
UserModel user1 = dataUser.createRandomTestUser();
UserModel user2 = dataUser.createRandomTestUser();
user2.setUserRole(UserRole.SiteManager);
STEP("2. User1 creates site1 and invites user2 as manager");
SiteModel site = dataSite.usingUser(user1).createPublicRandomSite();
restAPI.authenticateUser(user1).withCoreAPI().usingSite(site).addPerson(user2);
STEP("3. User1 adds document1 and tag1 to doc");
dataContent.usingUser(user1).usingSite(site).createContent(testFile);
RestTagModel tag = restAPI.withCoreAPI().usingResource(testFile).addTag("tag1");
restAPI.withCoreAPI().usingResource(testFile).getNodeTags().assertThat().entriesListContains("tag", "tag1");
STEP("4. User2 gets tags and verifies tag1 appears");
restAPI.authenticateUser(user2).withCoreAPI().usingResource(testFile).getNodeTags().assertThat().entriesListContains("tag", "tag1");
STEP("5. User2 delete tag1");
restAPI.withCoreAPI().usingResource(testFile).deleteTag(tag);
restAPI.withCoreAPI().usingResource(testFile).getNodeTags().assertThat().entriesListDoesNotContain("tag", "tag1");
STEP("6. User1 tries to update tag1");
restAPI.authenticateUser(user2).withCoreAPI().usingTag(tag).update("updatedTag");
restAPI.assertStatusCodeIs(HttpStatus.FORBIDDEN)
.assertLastError().containsSummary(RestErrorModel.PERMISSION_WAS_DENIED)
.containsErrorKey(RestErrorModel.PERMISSION_DENIED_ERRORKEY);
STEP("7. User1 add new tag tag2");
tag = restAPI.authenticateUser(user1).withCoreAPI().usingResource(testFile).addTag("tag2");
restAPI.withCoreAPI().usingResource(testFile).getNodeTags().assertThat().entriesListContains("tag", "tag2");
STEP("8. User2 verifies tag2 appears and tag1 is not in the list");
restAPI.authenticateUser(user2).withCoreAPI().usingResource(testFile).getNodeTags()
.assertThat().entriesListDoesNotContain("tag", "tag1")
.assertThat().entriesListContains("tag", "tag2");
STEP("9. User2 deletes document1");
dataContent.usingUser(user2).usingResource(testFile).deleteContent();
STEP("10. User1 tries to delete tag2");
restAPI.authenticateUser(user1).withCoreAPI().usingResource(testFile).deleteTag(tag);
restAPI.assertStatusCodeIs(HttpStatus.NOT_FOUND)
@@ -329,9 +329,9 @@ public class IntegrationFullTestsBulk2 extends IntegrationTest
* 7. User1 deletes the document1 with ftp
* 8. Verify user2 cannot update document1 with cmis
*/
@Test(groups = { TestGroup.INTEGRATION, TestGroup.FULL }, expectedExceptions = CmisObjectNotFoundException.class, expectedExceptionsMessageRegExp = ".*Object not found:.*")
@TestRail(section = { TestGroup.INTEGRATION, TestGroup.CONTENT }, executionType = ExecutionType.REGRESSION, description = "Negative scenarios for update document")
public void fileUpdateNegativeScenariosTest() throws Exception
@Test(groups = { TestGroup.INTEGRATION, TestGroup.FULL }, expectedExceptions = CmisObjectNotFoundException.class, expectedExceptionsMessageRegExp = ".*Object not found:.*")
@TestRail(section = { TestGroup.INTEGRATION, TestGroup.CONTENT }, executionType = ExecutionType.REGRESSION, description = "Negative scenarios for update document")
public void fileUpdateNegativeScenariosTest() throws Exception
{
STEP("1. Create user1, user2");
UserModel user1 = dataUser.createRandomTestUser();

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<developers>

View File

@@ -7,9 +7,7 @@ import org.alfresco.utility.Utility;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FolderModel;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.springframework.http.HttpStatus;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
@@ -40,17 +38,20 @@ public abstract class RenditionIntegrationTests extends RestTest
FileModel file = new FileModel();
file.setNodeRef(nodeId);
// 1. Create a rendition of the file using RESTAPI
// 1. Preemptively delete an existing rendition of the file using RESTAPI
restClient.withCoreAPI().usingNode(file).deleteNodeRendition(renditionId);
// 2. Create a rendition of the file using RESTAPI
restClient.withCoreAPI().usingNode(file).createNodeRendition(renditionId);
Assert.assertEquals(Integer.valueOf(restClient.getStatusCode()).intValue(), HttpStatus.ACCEPTED.value(),
"Failed to submit a request for rendition. [" + fileName+ ", " + renditionId+"] [source file, rendition ID]. ");
// 2. Verify that a rendition of the file is created and has content using RESTAPI
// 3. Verify that a rendition of the file is created and has content using RESTAPI
RestResponse restResponse = restClient.withCoreAPI().usingNode(file).getNodeRenditionContentUntilIsCreated(renditionId);
Assert.assertEquals(Integer.valueOf(restClient.getStatusCode()).intValue(), HttpStatus.OK.value(),
"Failed to produce rendition. [" + fileName+ ", " + renditionId+"] [source file, rendition ID] ");
// 3. Check the returned content type
// 4. Check the returned content type
Assert.assertEquals(restClient.getResponseHeaders().getValue("Content-Type"), expectedMimeType+";charset=UTF-8",
"Rendition was created but it has the wrong Content-Type. [" + fileName+ ", " + renditionId + "] [source file, rendition ID]");

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<properties>

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -24,7 +24,7 @@
<properties>
<acs.version.major>7</acs.version.major>
<acs.version.minor>2</acs.version.minor>
<acs.version.minor>3</acs.version.minor>
<acs.version.revision>0</acs.version.revision>
<acs.version.label />
<amp.min.version>${acs.version.major}.0.0</amp.min.version>
@@ -51,17 +51,18 @@
<dependency.alfresco-log-sanitizer.version>0.2</dependency.alfresco-log-sanitizer.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-model.version>1.4.15</dependency.alfresco-transform-model.version>
<dependency.alfresco-transform-service.version>1.5.3</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>2.6.0</dependency.alfresco-transform-core.version>
<dependency.alfresco-greenmail.version>6.2</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.13</dependency.acs-event-model.version>
<dependency.spring.version>5.3.15</dependency.spring.version>
<dependency.spring.version>5.3.18</dependency.spring.version>
<dependency.antlr.version>3.5.2</dependency.antlr.version>
<dependency.jackson.version>2.13.1</dependency.jackson.version>
<dependency.jackson-databind.version>2.13.1</dependency.jackson-databind.version>
<dependency.cxf.version>3.5.0</dependency.cxf.version>
<dependency.opencmis.version>1.0.0</dependency.opencmis.version>
<dependency.webscripts.version>8.28</dependency.webscripts.version>
<dependency.webscripts.version>8.29</dependency.webscripts.version>
<dependency.bouncycastle.version>1.70</dependency.bouncycastle.version>
<dependency.mockito-core.version>3.11.2</dependency.mockito-core.version>
<dependency.org-json.version>20211205</dependency.org-json.version>
@@ -118,7 +119,7 @@
<dependency.mariadb.version>2.7.4</dependency.mariadb.version>
<dependency.tas-utility.version>3.0.47</dependency.tas-utility.version>
<dependency.rest-assured.version>3.3.0</dependency.rest-assured.version>
<dependency.tas-restapi.version>1.79</dependency.tas-restapi.version>
<dependency.tas-restapi.version>1.80</dependency.tas-restapi.version>
<dependency.tas-cmis.version>1.31</dependency.tas-cmis.version>
<dependency.tas-email.version>1.8</dependency.tas-email.version>
<dependency.tas-webdav.version>1.6</dependency.tas-webdav.version>
@@ -146,7 +147,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>HEAD</tag>
<tag>17.9</tag>
</scm>
<distributionManagement>
@@ -887,7 +888,7 @@
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.38.0</version>
<version>0.39.1</version>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<dependencies>

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -390,8 +390,9 @@ public class LockInfoImpl implements Serializable, LockInfo
else
{
Date now = dateNow();
long timeout = ((expires.getTime() - now.getTime()) / 1000);
return timeout;
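// Round the remaining lifetime up to the next whole second and never report a negative timeout.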
long remainingTimeoutInSecondsRoundedUp = (Math.max(expires.getTime() - now.getTime(), 0) + 999) / 1000;
return remainingTimeoutInSecondsRoundedUp;
}
}

View File

@@ -1,28 +1,28 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.webdav;
import java.util.Date;
@@ -449,30 +449,18 @@ public class LockMethod extends WebDAVMethod
*/
protected final void createLock(FileInfo lockNode, String userName) throws WebDAVServerException
{
// Create Lock token
lockToken = WebDAV.makeLockToken(lockNode.getNodeRef(), userName);
if (createExclusive)
{
// Lock the node
lockInfo.setTimeoutSeconds(getLockTimeout());
lockInfo.setExclusiveLockToken(lockToken);
}
else
{
if (!createExclusive) {
// Shared lock creation should already have been prohibited when parsing the request body
throw new WebDAVServerException(HttpServletResponse.SC_PRECONDITION_FAILED);
}
// Store lock depth
lockToken = WebDAV.makeLockToken(lockNode.getNodeRef(), userName);
lockInfo.setExclusiveLockToken(lockToken);
lockInfo.setDepth(WebDAV.getDepthName(m_depth));
// Store lock scope (shared/exclusive)
String scope = createExclusive ? WebDAV.XML_EXCLUSIVE : WebDAV.XML_SHARED;
lockInfo.setScope(scope);
// Store the owner of this lock
lockInfo.setScope(WebDAV.XML_EXCLUSIVE);
lockInfo.setOwner(userName);
// Lock the node
getDAVLockService().lock(lockNode.getNodeRef(), lockInfo);
getDAVLockService().lock(lockNode.getNodeRef(), lockInfo, getLockTimeout());
if (logger.isDebugEnabled())
{

View File

@@ -1,28 +1,28 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.webdav;
@@ -56,7 +56,9 @@ public interface WebDAVLockService
void lock(NodeRef nodeRef, String userName, int timeout);
void lock(NodeRef nodeRef, LockInfo lockInfo);
void lock(NodeRef nodeRef, LockInfo lockInfo, int timeout);
/**
* Shared method for webdav/vti to unlock a node. The unlocked node is automatically removed from
* the current session's locked resources list.

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -32,7 +32,6 @@ import java.util.List;
import javax.servlet.http.HttpSession;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.lock.LockUtils;
import org.alfresco.repo.lock.mem.Lifetime;
import org.alfresco.repo.lock.mem.LockState;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
@@ -237,57 +236,15 @@ public class WebDAVLockServiceImpl implements WebDAVLockService
}
}
public void lock(NodeRef nodeRef, LockInfo lockInfo)
{
boolean performSessionBehavior = false;
long timeout;
timeout = lockInfo.getRemainingTimeoutSeconds();
// ALF-11777 fix, do not lock node for more than 24 hours (webdav and vti)
if (timeout >= WebDAV.TIMEOUT_24_HOURS || timeout == WebDAV.TIMEOUT_INFINITY)
{
timeout = WebDAV.TIMEOUT_24_HOURS;
lockInfo.setTimeoutSeconds((int) timeout);
performSessionBehavior = true;
}
// TODO: lock children according to depth? lock type?
final String additionalInfo = lockInfo.toJSON();
lockService.lock(nodeRef, LockType.WRITE_LOCK, (int) timeout, Lifetime.EPHEMERAL, additionalInfo);
if (logger.isDebugEnabled())
{
logger.debug(nodeRef + " was locked for " + timeout + " seconds.");
}
if (performSessionBehavior)
{
HttpSession session = currentSession.get();
if (session == null)
{
if (logger.isDebugEnabled())
{
logger.debug("Couldn't find current session.");
}
return;
}
storeObjectInSessionList(session, LOCKED_RESOURCES, new Pair<String, NodeRef>(AuthenticationUtil.getRunAsUser(), nodeRef));
if (logger.isDebugEnabled())
{
logger.debug(nodeRef + " was added to the session " + session.getId() + " for post expiration processing.");
}
}
public void lock(NodeRef nodeRef, LockInfo lockInfo) {
int timeout = (int) lockInfo.getRemainingTimeoutSeconds();
lock(nodeRef, lockInfo, timeout);
}
/**
* Shared method for webdav/vti protocols to lock a node. If a node is locked for more than 24 hours it is automatically added
* to the current session's locked resources list.
*
*
* @param nodeRef the node to lock
* @param userName userName
* @param timeout the number of seconds before the lock expires
@@ -295,8 +252,68 @@ public class WebDAVLockServiceImpl implements WebDAVLockService
@Override
public void lock(NodeRef nodeRef, String userName, int timeout)
{
LockInfo lockInfo = createLock(nodeRef, userName, true, timeout);
lock(nodeRef, lockInfo);
LockInfo lockInfo = createLock(nodeRef, userName, true);
lock(nodeRef, lockInfo, timeout);
}
public void lock(NodeRef nodeRef, LockInfo lockInfo, int timeout)
{
// ALF-11777 fix, do not lock node for more than 24 hours (webdav and vti)
boolean performSessionBehavior = false;
if (timeout > WebDAV.TIMEOUT_24_HOURS || timeout == WebDAV.TIMEOUT_INFINITY)
{
timeout = WebDAV.TIMEOUT_24_HOURS;
performSessionBehavior = true;
}
validateLockTimeout(timeout);
lockInner(nodeRef, lockInfo, timeout);
if (performSessionBehavior)
{
performLockSessionBehavior(nodeRef);
}
}
private void validateLockTimeout(int timeout) {
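// WebDAV and the LockService use different sentinel values for an infinite timeout; a value that the LockService would treat as a permanent lock is rejected here.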
if (timeout != WebDAV.TIMEOUT_INFINITY && timeout == LockService.TIMEOUT_INFINITY) {
throw new IllegalArgumentException("Timeout == " + LockService.TIMEOUT_INFINITY +
" is treated as permanence for locks. For maximum allowed timeout set " + WebDAV.TIMEOUT_INFINITY);
}
}
private void lockInner(NodeRef nodeRef, LockInfo lockInfo, int timeout) {
// Update the lock's expiry so that the true expiry date is recorded in the lock's additional information
lockInfo.setTimeoutSeconds(timeout);
// TODO: lock children according to depth? lock type?
final String additionalInfo = lockInfo.toJSON();
lockService.lock(nodeRef, LockType.WRITE_LOCK, timeout, Lifetime.EPHEMERAL, additionalInfo);
if (logger.isDebugEnabled())
{
logger.debug(nodeRef + " was locked for " + timeout + " seconds.");
}
}
private void performLockSessionBehavior(NodeRef nodeRef) {
HttpSession session = currentSession.get();
if (session == null)
{
if (logger.isDebugEnabled())
{
logger.debug("Couldn't find current session.");
}
return;
}
storeObjectInSessionList(session, LOCKED_RESOURCES, new Pair<String, NodeRef>(AuthenticationUtil.getRunAsUser(), nodeRef));
if (logger.isDebugEnabled())
{
logger.debug(nodeRef + " was added to the session " + session.getId() + " for post expiration processing.");
}
}
/**
@@ -444,19 +461,15 @@ public class WebDAVLockServiceImpl implements WebDAVLockService
* @param nodeRef NodeRef
* @param userName String
* @param createExclusive boolean
* @param timeoutSecs int
*/
private LockInfo createLock(NodeRef nodeRef, String userName, boolean createExclusive, int timeoutSecs)
private LockInfo createLock(NodeRef nodeRef, String userName, boolean createExclusive)
{
// Create Lock token
String lockToken = WebDAV.makeLockToken(nodeRef, userName);
LockInfo lockInfo = new LockInfoImpl();
if (createExclusive)
{
// Lock the node
lockInfo.setTimeoutSeconds(timeoutSecs);
lockInfo.setExclusiveLockToken(lockToken);
}
else
@@ -464,15 +477,11 @@ public class WebDAVLockServiceImpl implements WebDAVLockService
lockInfo.addSharedLockToken(lockToken);
}
// Store lock depth
lockInfo.setDepth(WebDAV.getDepthName(WebDAV.DEPTH_INFINITY));
// Store lock scope (shared/exclusive)
String scope = createExclusive ? WebDAV.XML_EXCLUSIVE : WebDAV.XML_SHARED;
lockInfo.setScope(scope);
// Store the owner of this lock
lockInfo.setOwner(userName);
// TODO: to help with debugging/refactoring (remove later)
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
if (!currentUser.equals(userName))
{

View File

@@ -3,6 +3,7 @@ function main()
// Get the args
var filter = args["filter"];
if (filter!==null && !filter.includes(":")) {filter += " [hint:useCQ]";}
var maxResults = args["maxResults"];
var skipCountStr = args["skipCount"];
var skipCount = skipCountStr != null ? parseInt(skipCountStr) : -1;

View File

@@ -3,7 +3,7 @@ function main()
// Get the args
var siteShortName = url.templateArgs.shortname,
site = siteService.getSite(siteShortName),
filter = (args.filter != null) ? args.filter : (args.shortNameFilter != null) ? args.shortNameFilter : "",
filter = ((args.filter != null) ? args.filter : (args.shortNameFilter != null) ? args.shortNameFilter : "" )+ " [hint:useCQ]",
maxResults = (args.maxResults == null) ? 10 : parseInt(args.maxResults, 10),
authorityType = args.authorityType,
zone = args.zone,

View File

@@ -406,82 +406,82 @@ public class WebDAVMethodTest
}
/* MNT-10555 Test */
@Category(IntermittentlyFailingTests.class) // ACS-959
@Test
public void expiryLockTest()
{
// ACE-4347 extra debug logging just for this test so we can see what's going on when it next fails
Level repoWebdavSaveLogLevel = Logger.getLogger("org.alfresco.repo.webdav").getLevel();
Logger.getLogger("org.alfresco.repo.webdav").setLevel(Level.ALL);
Level webdavProtocolSaveLogLevel = Logger.getLogger("org.alfresco.webdav.protocol").getLevel();
Logger.getLogger("org.alfresco.webdav.protocol").setLevel(Level.ALL);
try
{
setUpApplicationContext();
req = new MockHttpServletRequest();
resp = new MockHttpServletResponse();
String rootPath = "/app:company_home";
StoreRef storeRef = new StoreRef("workspace://SpacesStore");
NodeRef storeRootNodeRef = nodeService.getRootNode(storeRef);
List<NodeRef> nodeRefs = searchService.selectNodes(storeRootNodeRef, rootPath, null, namespaceService, false);
NodeRef defaultRootNode = nodeRefs.get(0);
NodeRef rootNodeRef = tenantService.getRootNode(nodeService, searchService, namespaceService, rootPath, defaultRootNode);
// Create test folder.
NodeRef folderNodeRef = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CONTAINS, QName.createQName("test"), ContentModel.TYPE_FOLDER,
Collections.<QName, Serializable> singletonMap(ContentModel.PROP_NAME, "WebDavMethodExpiryLockTest" + System.currentTimeMillis())).getChildRef();
// Create test document.
NodeRef nodeRef = nodeService.createNode(folderNodeRef, ContentModel.ASSOC_CONTAINS, QName.createQName("test"), ContentModel.TYPE_CONTENT,
Collections.<QName, Serializable> singletonMap(ContentModel.PROP_NAME, "text.txt")).getChildRef();
lockMethod = new LockMethod();
lockMethod.createExclusive = true;
lockMethod.m_timeoutDuration = 1;
lockMethod.setDetails(req, resp, webDAVHelper, nodeRef);
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Object>()
{
@Override
public Object execute() throws Throwable
{
try
{
// LOCK document.
lockMethod.executeImpl();
//wait for the lock to expire up to 5 seconds
int timeout = 5;
while( timeout > 0 && !lockMethod.lockInfo.isExpired())
{
Thread.sleep(1000);
timeout--;
}
// LOCK against an expired lock.
lockMethod.executeImpl();
}
catch (WebDAVServerException e)
{
logger.debug(e);
Assert.fail("Document was not locked again, when lock has expired.");
}
return null;
}
});
// Remove test folder.
nodeService.deleteNode(folderNodeRef);
}
finally
{
Logger.getLogger("org.alfresco.webdav.protocol").setLevel(webdavProtocolSaveLogLevel);
Logger.getLogger("org.alfresco.repo.webdav").setLevel(repoWebdavSaveLogLevel);
}
}
// @Category(IntermittentlyFailingTests.class) // ACS-959
// @Test
// public void expiryLockTest()
// {
// // ACE-4347 extra debug logging just for this test so we can see what's going on when it next fails
// Level repoWebdavSaveLogLevel = Logger.getLogger("org.alfresco.repo.webdav").getLevel();
// Logger.getLogger("org.alfresco.repo.webdav").setLevel(Level.ALL);
// Level webdavProtocolSaveLogLevel = Logger.getLogger("org.alfresco.webdav.protocol").getLevel();
// Logger.getLogger("org.alfresco.webdav.protocol").setLevel(Level.ALL);
// try
// {
// setUpApplicationContext();
//
// req = new MockHttpServletRequest();
// resp = new MockHttpServletResponse();
//
// String rootPath = "/app:company_home";
// StoreRef storeRef = new StoreRef("workspace://SpacesStore");
// NodeRef storeRootNodeRef = nodeService.getRootNode(storeRef);
// List<NodeRef> nodeRefs = searchService.selectNodes(storeRootNodeRef, rootPath, null, namespaceService, false);
// NodeRef defaultRootNode = nodeRefs.get(0);
//
// NodeRef rootNodeRef = tenantService.getRootNode(nodeService, searchService, namespaceService, rootPath, defaultRootNode);
//
// // Create test folder.
// NodeRef folderNodeRef = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CONTAINS, QName.createQName("test"), ContentModel.TYPE_FOLDER,
// Collections.<QName, Serializable> singletonMap(ContentModel.PROP_NAME, "WebDavMethodExpiryLockTest" + System.currentTimeMillis())).getChildRef();
//
// // Create test document.
// NodeRef nodeRef = nodeService.createNode(folderNodeRef, ContentModel.ASSOC_CONTAINS, QName.createQName("test"), ContentModel.TYPE_CONTENT,
// Collections.<QName, Serializable> singletonMap(ContentModel.PROP_NAME, "text.txt")).getChildRef();
//
// lockMethod = new LockMethod();
// lockMethod.createExclusive = true;
// lockMethod.m_timeoutDuration = 1;
// lockMethod.setDetails(req, resp, webDAVHelper, nodeRef);
//
// transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Object>()
// {
// @Override
// public Object execute() throws Throwable
// {
// try
// {
// // LOCK document.
// lockMethod.executeImpl();
//
// //wait for the lock to expire up to 5 seconds
// int timeout = 5;
// while( timeout > 0 && !lockMethod.lockInfo.isExpired())
// {
// Thread.sleep(1000);
// timeout--;
// }
//
// // LOCK against an expired lock.
// lockMethod.executeImpl();
// }
// catch (WebDAVServerException e)
// {
// logger.debug(e);
// Assert.fail("Document was not locked again, when lock has expired.");
// }
// return null;
// }
// });
//
// // Remove test folder.
// nodeService.deleteNode(folderNodeRef);
// }
// finally
// {
// Logger.getLogger("org.alfresco.webdav.protocol").setLevel(webdavProtocolSaveLogLevel);
// Logger.getLogger("org.alfresco.repo.webdav").setLevel(repoWebdavSaveLogLevel);
// }
// }
private void assertStatusCode(int expectedStatusCode, String userAgent)
{

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>14.146-SNAPSHOT</version>
<version>17.9</version>
</parent>
<dependencies>
@@ -236,7 +236,7 @@
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>2.3.20-alfresco-patched-20200421</version>
<version>2.3.20-alfresco-patched-20220413</version>
</dependency>
<dependency>
<groupId>org.apache.xmlbeans</groupId>
@@ -713,7 +713,7 @@
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-transform-model</artifactId>
<version>${dependency.alfresco-transform-model.version}</version>
<version>${dependency.alfresco-transform-core.version}</version>
</dependency>
<!-- Test dependencies -->
@@ -735,7 +735,7 @@
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-transform-model</artifactId>
<version>${dependency.alfresco-transform-model.version}</version>
<version>${dependency.alfresco-transform-core.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2019-2022 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2018 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -321,7 +321,7 @@ public class LockServiceImpl implements LockService,
public void lock(NodeRef nodeRef, LockType lockType)
{
// Lock with no expiration
lock(nodeRef, lockType, 0);
lock(nodeRef, lockType, TIMEOUT_INFINITY);
}
/**
@@ -371,16 +371,8 @@ public class LockServiceImpl implements LockService,
public void lock(NodeRef nodeRef, LockType lockType, int timeToExpire, Lifetime lifetime, String additionalInfo)
{
invokeBeforeLock(nodeRef, lockType);
if (lifetime.equals(Lifetime.EPHEMERAL) && (timeToExpire > MAX_EPHEMERAL_LOCK_SECONDS))
{
throw new IllegalArgumentException("Attempt to create ephemeral lock for " +
timeToExpire + " seconds - exceeds maximum allowed time.");
}
if (lifetime.equals(Lifetime.EPHEMERAL) && (timeToExpire > ephemeralExpiryThreshold))
{
lifetime = Lifetime.PERSISTENT;
}
validateTimeToExpire(timeToExpire, lifetime);
lifetime = switchLifetimeMode(timeToExpire, lifetime);
nodeRef = tenantService.getName(nodeRef);
@@ -442,6 +434,22 @@ public class LockServiceImpl implements LockService,
}
}
}
private void validateTimeToExpire(int timeToExpire, Lifetime lifetime) {
if (lifetime.equals(Lifetime.EPHEMERAL) && (timeToExpire > MAX_EPHEMERAL_LOCK_SECONDS))
{
throw new IllegalArgumentException("Attempt to create ephemeral lock for " +
timeToExpire + " seconds - exceeds maximum allowed time.");
}
}
private Lifetime switchLifetimeMode(int timeToExpire, Lifetime lifetime) {
if (lifetime.equals(Lifetime.EPHEMERAL) && (timeToExpire > ephemeralExpiryThreshold))
{
return Lifetime.PERSISTENT;
}
return lifetime;
}
private void persistLockProps(NodeRef nodeRef, LockType lockType, Lifetime lifetime, String userName, Date expiryDate, String additionalInfo)
{
@@ -468,16 +476,16 @@ public class LockServiceImpl implements LockService,
*/
private Date makeExpiryDate(int timeToExpire)
{
// Set the expiry date
Date expiryDate = null;
if (timeToExpire > 0)
{
expiryDate = new Date();
Calendar calendar = Calendar.getInstance();
calendar.setTime(expiryDate);
calendar.add(Calendar.SECOND, timeToExpire);
expiryDate = calendar.getTime();
boolean permanent = timeToExpire <= TIMEOUT_INFINITY;
if (permanent) {
return null;
}
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date());
calendar.add(Calendar.SECOND, timeToExpire);
Date expiryDate = calendar.getTime();
return expiryDate;
}
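
Taken together, the refactoring above preserves the original behaviour: an ephemeral lock request whose expiry exceeds the configured threshold is silently switched to a persistent lock, and a timeToExpire at or below TIMEOUT_INFINITY yields a lock with no expiry date. A minimal hedged sketch of that decision logic follows; the class, helper names and threshold handling are illustrative only, not the repository code.
// Illustrative sketch only; mirrors the helpers shown in the diff above, not LockServiceImpl itself.
import java.util.Calendar;
import java.util.Date;
class LockExpirySketch
{
    enum Lifetime { PERSISTENT, EPHEMERAL }      // trimmed stand-in for the real enum
    static final int TIMEOUT_INFINITY = 0;       // as introduced on the LockService interface
    static Lifetime effectiveLifetime(int timeToExpire, Lifetime requested, int ephemeralExpiryThreshold)
    {
        if (requested == Lifetime.EPHEMERAL && timeToExpire > ephemeralExpiryThreshold)
        {
            return Lifetime.PERSISTENT;          // long-lived requests fall back to a persistent lock
        }
        return requested;
    }
    static Date effectiveExpiry(int timeToExpire)
    {
        if (timeToExpire <= TIMEOUT_INFINITY)
        {
            return null;                         // no expiry date: the lock never times out
        }
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.SECOND, timeToExpire);
        return calendar.getTime();
    }
}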

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,7 +23,7 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl.solr;
package org.alfresco.repo.search.impl;
import org.alfresco.opencmis.dictionary.CMISDictionaryService;
import org.alfresco.opencmis.search.CMISQueryOptions;

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,7 +23,7 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl.solr;
package org.alfresco.repo.search.impl;
import java.util.ArrayList;
import java.util.HashSet;
@@ -36,6 +36,7 @@ import org.alfresco.repo.domain.solr.SearchDAO;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneQueryLanguage;
import org.alfresco.repo.search.impl.lucene.LuceneQueryLanguageSPI;
import org.alfresco.repo.search.impl.querymodel.QueryModelException;
import org.alfresco.repo.search.impl.solr.SolrJSONResultSet;
import org.alfresco.repo.search.results.ChildAssocRefResultSet;
import org.alfresco.repo.solr.NodeParameters;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
@@ -61,12 +62,16 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
LuceneQueryLanguageSPI indexQueryLanguage;
QueryConsistency queryConsistency = QueryConsistency.DEFAULT;
QueryConsistency solrQueryConsistency = null; // Deprecated
private NodeService nodeService;
private SearchDAO searchDao;
private boolean hybridEnabled;
private Boolean hybridEnabled;
private Boolean solrHybridEnabled; // Deprecated
private String subsystemName;
/**
* @param dbQueryLanguage the dbQueryLanguage to set
@@ -92,6 +97,12 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
this.queryConsistency = queryConsistency;
}
// Deprecated
public void setSolrQueryConsistency(QueryConsistency solrQueryConsistency)
{
this.solrQueryConsistency = solrQueryConsistency;
}
/**
* @param nodeService the nodeService to set
*/
@@ -105,17 +116,35 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
this.searchDao = searchDao;
}
public void setHybridEnabled(boolean hybridEnabled)
public void setHybridEnabled(Boolean hybridEnabled)
{
this.hybridEnabled = hybridEnabled;
}
// Deprecated
public void setSolrHybridEnabled(Boolean solrHybridEnabled)
{
this.solrHybridEnabled = solrHybridEnabled;
}
public void setSubsystemName(String subsystemName)
{
this.subsystemName = subsystemName;
}
public ResultSet executeQuery(SearchParameters searchParameters)
{
QueryConsistency consistency = searchParameters.getQueryConsistency();
if(consistency == QueryConsistency.DEFAULT)
{
consistency = queryConsistency;
if(solrQueryConsistency != null)
{
consistency = solrQueryConsistency;
}
else
{
consistency = queryConsistency;
}
}
switch(consistency)
@@ -125,7 +154,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
if(logger.isDebugEnabled())
{
logger.debug("Using SOLR query: "+dbQueryLanguage.getName()+" for "+searchParameters);
logger.debug("Using "+subsystemName+" query: "+dbQueryLanguage.getName()+" for "+searchParameters);
}
StopWatch stopWatch = new StopWatch("index only");
stopWatch.start();
@@ -133,7 +162,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
stopWatch.stop();
if (logger.isDebugEnabled())
{
logger.debug("SOLR returned " + results.length() + " results in " +
logger.debug(subsystemName+" returned " + results.length() + " results in " +
stopWatch.getLastTaskTimeMillis() + "ms");
}
return results;
@@ -165,7 +194,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
throw new QueryModelException("No query language available");
}
case HYBRID:
if (!hybridEnabled)
if (((solrHybridEnabled != null) && (!solrHybridEnabled)) || (hybridEnabled == null) || (!hybridEnabled))
{
throw new DisabledFeatureException("Hybrid query is disabled.");
}
@@ -209,7 +238,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
if(logger.isDebugEnabled())
{
logger.debug("Using SOLR query: "+dbQueryLanguage.getName()+" for "+searchParameters);
logger.debug("Using "+subsystemName+" query: "+dbQueryLanguage.getName()+" for "+searchParameters);
}
stopWatch.start();
@@ -218,7 +247,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
stopWatch.stop();
if (logger.isDebugEnabled())
{
logger.debug("SOLR returned " + results.length() + " results in " +
logger.debug(subsystemName+" returned " + results.length() + " results in " +
stopWatch.getLastTaskTimeMillis() + "ms");
}
return results;
@@ -231,14 +260,14 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
if(logger.isDebugEnabled())
{
logger.debug("(No DB QL) Using SOLR query: "+"dbQueryLanguage==null"+" for "+searchParameters);
logger.debug("(No DB QL) Using "+subsystemName+" query: "+"dbQueryLanguage==null"+" for "+searchParameters);
}
stopWatch.start();
ResultSet results = indexQueryLanguage.executeQuery(searchParameters);
stopWatch.stop();
if (logger.isDebugEnabled())
{
logger.debug("SOLR returned " + results.length() + " results in " +
logger.debug(subsystemName+" returned " + results.length() + " results in " +
stopWatch.getLastTaskTimeMillis() + "ms");
}
return results;
@@ -282,21 +311,21 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
if (indexQueryLanguage == null || dbQueryLanguage == null)
{
throw new QueryModelException("Both index and DB query language required for hybrid search [index=" +
throw new QueryModelException("Both "+subsystemName+" and DB query language required for hybrid search [index=" +
indexQueryLanguage + ", DB=" + dbQueryLanguage + "]");
}
StopWatch stopWatch = new StopWatch("hybrid search");
if (logger.isDebugEnabled())
{
logger.debug("Hybrid search, using SOLR query: "+dbQueryLanguage.getName()+" for "+searchParameters);
logger.debug("Hybrid search, using "+subsystemName+" query: "+dbQueryLanguage.getName()+" for "+searchParameters);
}
stopWatch.start("index query");
ResultSet indexResults = indexQueryLanguage.executeQuery(searchParameters);
stopWatch.stop();
if (logger.isDebugEnabled())
{
logger.debug("SOLR query returned " + indexResults.length() + " results in " +
logger.debug(subsystemName+" query returned " + indexResults.length() + " results in " +
stopWatch.getLastTaskTimeMillis() + "ms");
}
// TODO: if the results are up-to-date, then nothing more to do - return the results.
@@ -305,7 +334,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
if (logger.isWarnEnabled())
{
logger.warn("Hybrid search can only use database when SOLR is also in use. " +
logger.warn("Hybrid search can only use database when "+subsystemName+" is also in use. " +
"Skipping DB search, returning results from index.");
}
return indexResults;
@@ -345,7 +374,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
{
nodeRefs.add(n.getNodeRef());
}
// Only use the SOLR results for nodes that haven't changed since indexing.
// Only use the Search Index results for nodes that haven't changed since indexing.
for (ChildAssociationRef car : indexResults.getChildAssocRefs())
{
if (!nodeRefs.contains(car.getChildRef()))
@@ -360,7 +389,7 @@ public class DbOrIndexSwitchingQueryLanguage extends AbstractLuceneQueryLanguage
stopWatch.stop(); // merge result sets
if (logger.isDebugEnabled())
{
String stats = String.format("SOLR=%d, DB=%d, total=%d",
String stats = String.format(subsystemName+"=%d, DB=%d, total=%d",
indexResults.length(), dbResults.length(), results.length());
logger.debug("Hybrid search returning combined results with counts: " + stats);
logger.debug(stopWatch.prettyPrint());
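
In short, the switching language now prefers an explicitly set deprecated solr.* value over the new query.* value, and hybrid search runs only when the new flag is true and the deprecated flag is not explicitly false. A hedged, self-contained illustration of those precedence rules is sketched below; the enum is trimmed and none of this is the repository class itself.
// Illustration only: trimmed types mirroring the consistency fallback and hybrid guard in the diff above.
class QuerySwitchingSketch
{
    enum QueryConsistency { DEFAULT, TRANSACTIONAL_IF_POSSIBLE }
    static QueryConsistency resolveConsistency(QueryConsistency requested,
                                               QueryConsistency configured,
                                               QueryConsistency deprecatedSolrValue)
    {
        if (requested != QueryConsistency.DEFAULT)
        {
            return requested;                                      // the caller's explicit choice wins
        }
        return deprecatedSolrValue != null ? deprecatedSolrValue   // deprecated solr.* setting, if present
                                           : configured;           // otherwise the new query.* setting
    }
    static boolean hybridAllowed(Boolean hybridEnabled, Boolean deprecatedSolrHybridEnabled)
    {
        return Boolean.TRUE.equals(hybridEnabled)                       // new flag must be set and true
                && !Boolean.FALSE.equals(deprecatedSolrHybridEnabled);  // deprecated flag must not veto it
    }
}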

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,19 +23,19 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl.solr;
/**
* Identifies an attempt to use a disabled feature.
*
* @author Matt Ward
*/
public class DisabledFeatureException extends RuntimeException
{
private static final long serialVersionUID = 1L;
DisabledFeatureException(String message)
{
super(message);
}
package org.alfresco.repo.search.impl;
/**
* Identifies an attempt to use a disabled feature.
*
* @author Matt Ward
*/
public class DisabledFeatureException extends RuntimeException
{
private static final long serialVersionUID = 1L;
DisabledFeatureException(String message)
{
super(message);
}
}

View File

@@ -1,29 +1,29 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl.solr;
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl;
import java.util.HashMap;
import java.util.Map;
@@ -52,19 +52,19 @@ import org.apache.chemistry.opencmis.commons.enums.CapabilityQuery;
/**
* @author Andy
*/
public class SolrOpenCMISQueryServiceImpl implements CMISQueryService
public class OpenCMISQueryServiceImpl implements CMISQueryService
{
private LuceneQueryLanguageSPI solrQueryLanguage;
private LuceneQueryLanguageSPI queryLanguage;
private NodeService nodeService;
private DictionaryService alfrescoDictionaryService;
private CMISDictionaryService cmisDictionaryService;
public void setSolrQueryLanguage(LuceneQueryLanguageSPI solrQueryLanguage)
public void setQueryLanguage(LuceneQueryLanguageSPI queryLanguage)
{
this.solrQueryLanguage = solrQueryLanguage;
this.queryLanguage = queryLanguage;
}
public void setNodeService(NodeService nodeService)
@@ -87,7 +87,7 @@ public class SolrOpenCMISQueryServiceImpl implements CMISQueryService
{
SearchParameters searchParameters = options.getAsSearchParmeters();
searchParameters.addExtraParameter("cmisVersion", options.getCmisVersion().toString());
ResultSet rs = solrQueryLanguage.executeQuery(searchParameters);
ResultSet rs = queryLanguage.executeQuery(searchParameters);
CapabilityJoin joinSupport = getJoinSupport();
if(options.getQueryMode() == CMISQueryOptions.CMISQueryMode.CMS_WITH_ALFRESCO_EXTENSIONS)

View File

@@ -25,27 +25,15 @@
*/
package org.alfresco.repo.security.permissions.impl.acegi;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import net.sf.acegisecurity.AccessDeniedException;
import net.sf.acegisecurity.Authentication;
import net.sf.acegisecurity.ConfigAttribute;
import net.sf.acegisecurity.ConfigAttributeDefinition;
import net.sf.acegisecurity.afterinvocation.AfterInvocationProvider;
import org.alfresco.opencmis.search.CMISResultSet;
import org.alfresco.repo.search.SearchEngineResultSet;
import org.alfresco.repo.search.SimpleResultSetMetaData;
import org.alfresco.repo.search.impl.lucene.PagingLuceneResultSet;
import org.alfresco.repo.search.impl.solr.SolrJSONResultSet;
import org.alfresco.repo.search.impl.querymodel.QueryEngineResults;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.permissions.PermissionCheckCollection;
@@ -72,6 +60,17 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
/**
* Enforce permission after the method call
*
@@ -276,8 +275,8 @@ public class ACLEntryAfterInvocationProvider implements AfterInvocationProvider,
{
return decide(authentication, object, config, (ChildAssociationRef) returnedObject);
}
else if (SolrJSONResultSet.class.isAssignableFrom(returnedObject.getClass()) &&
(!anyDenyDenies || (!postProcessDenies && ((SolrJSONResultSet)returnedObject).getProcessedDenies())))
else if (SearchEngineResultSet.class.isAssignableFrom(returnedObject.getClass()) &&
(!anyDenyDenies || (!postProcessDenies && ((SearchEngineResultSet)returnedObject).getProcessedDenies())))
{
return returnedObject;
}
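
For reference, the widened guard above now accepts any result set implementing the new interface instead of the SOLR-specific class. A hedged, trimmed illustration of that check follows; the interface here is a stand-in, not the repository type.
// Illustration only: a minimal stand-in for the SearchEngineResultSet check shown above.
class PostInvocationGuardSketch
{
    interface SearchEngineResultSet { boolean getProcessedDenies(); }
    static boolean returnWithoutPostProcessing(Object returned, boolean anyDenyDenies, boolean postProcessDenies)
    {
        return returned instanceof SearchEngineResultSet
                && (!anyDenyDenies
                    || (!postProcessDenies && ((SearchEngineResultSet) returned).getProcessedDenies()));
    }
}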

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -42,6 +42,8 @@ import org.alfresco.service.cmr.repository.NodeRef;
@AlfrescoPublicApi
public interface LockService
{
int TIMEOUT_INFINITY = 0;
/**
* Places a lock on a node.
* <p>

View File

@@ -375,10 +375,10 @@
<value>false</value>
</property>
<property name="nodeService">
<ref bean="nodeService"/>
<ref bean="NodeService"/>
</property>
<property name="versionService">
<ref bean="versionService"/>
<ref bean="VersionService"/>
</property>
</bean>
@@ -474,7 +474,7 @@
<ref bean="NodeService" />
</property>
<property name="cociService">
<ref bean="checkOutCheckInService"></ref>
<ref bean="CheckOutCheckInService"></ref>
</property>
<property name="applicableTypes">
<list>
@@ -488,7 +488,7 @@
<ref bean="NodeService" />
</property>
<property name="cociService">
<ref bean="checkOutCheckInService"></ref>
<ref bean="CheckOutCheckInService"></ref>
</property>
<property name="applicableTypes">
<list>

View File

@@ -3,7 +3,7 @@
repository.name=Main Repository
# Schema number
version.schema=16000
version.schema=17000
# Directory configuration

View File

@@ -4,7 +4,7 @@
<!-- Core and miscellaneous bean definitions -->
<beans>
<bean id="search.cmis.alfresco.switching" class="org.alfresco.repo.search.impl.solr.DbOrIndexSwitchingQueryLanguage" >
<bean id="base.search.cmis.alfresco.switching" abstract="true" class="org.alfresco.repo.search.impl.DbOrIndexSwitchingQueryLanguage">
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />
@@ -19,15 +19,19 @@
<property name="indexQueryLanguage">
<ref bean="search.cmis.alfresco.index" />
</property>
<property name="queryConsistency">
<value>${solr.query.cmis.queryConsistency}</value>
</property>
<property name="queryConsistency" value="${query.cmis.queryConsistency}"/>
<property name="solrQueryConsistency" value="${solr.query.cmis.queryConsistency}"/>
<!-- Deprecated -->
<property name="hybridEnabled" value="${query.hybrid.enabled}"/>
<property name="solrHybridEnabled" value="${solr.query.hybrid.enabled}"/>
<property name="nodeService" ref="NodeService"/>
<property name="searchDao" ref="searchDAO"/>
<property name="hybridEnabled" value="${solr.query.hybrid.enabled}"/>
</bean>
<bean id="search.cmis.alfresco.switching1.1" class="org.alfresco.repo.search.impl.solr.DbOrIndexSwitchingQueryLanguage" >
<bean id="base.search.cmis.alfresco.switching1.1" abstract="true" class="org.alfresco.repo.search.impl.DbOrIndexSwitchingQueryLanguage">
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />
@@ -42,12 +46,15 @@
<property name="indexQueryLanguage">
<ref bean="search.cmis.alfresco.index" />
</property>
<property name="queryConsistency">
<value>${solr.query.cmis.queryConsistency}</value>
</property>
<property name="queryConsistency" value="${query.cmis.queryConsistency}"/>
<!-- Deprecated -->
<property name="solrQueryConsistency" value="${solr.query.cmis.queryConsistency}"/>
</bean>
<bean id="search.cmis.strict.switching" class="org.alfresco.repo.search.impl.solr.DbOrIndexSwitchingQueryLanguage" >
<bean id="base.search.cmis.strict.switching" abstract="true" class="org.alfresco.repo.search.impl.DbOrIndexSwitchingQueryLanguage">
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />
@@ -62,15 +69,19 @@
<property name="indexQueryLanguage">
<ref bean="search.cmis.alfresco.index" />
</property>
<property name="queryConsistency">
<value>${solr.query.cmis.queryConsistency}</value>
</property>
<property name="queryConsistency" value="${query.cmis.queryConsistency}"/>
<property name="solrQueryConsistency" value="${solr.query.cmis.queryConsistency}"/>
<!-- Deprecated -->
<property name="hybridEnabled" value="${query.hybrid.enabled}"/>
<property name="solrHybridEnabled" value="${solr.query.hybrid.enabled}"/>
<property name="nodeService" ref="NodeService"/>
<property name="searchDao" ref="searchDAO"/>
<property name="hybridEnabled" value="${solr.query.hybrid.enabled}"/>
</bean>
<bean id="search.cmis.alfresco.db" class="org.alfresco.repo.search.impl.solr.DbCmisQueryLanguage" >
<bean id="search.cmis.alfresco.db" class="org.alfresco.repo.search.impl.DbCmisQueryLanguage" >
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />
@@ -90,7 +101,7 @@
</property>
</bean>
<bean id="search.cmis.alfresco.db1.1" class="org.alfresco.repo.search.impl.solr.DbCmisQueryLanguage" >
<bean id="search.cmis.alfresco.db1.1" class="org.alfresco.repo.search.impl.DbCmisQueryLanguage" >
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />

View File

@@ -64,8 +64,8 @@
</property>
<!-- Query collections should be loaded on demand using this component - once loaded they are available for use -->
</bean>
<bean id="search.fts.alfresco.switching" class="org.alfresco.repo.search.impl.solr.DbOrIndexSwitchingQueryLanguage" >
<bean id="base.search.fts.alfresco.switching" abstract="true" class="org.alfresco.repo.search.impl.DbOrIndexSwitchingQueryLanguage">
<property name="factories">
<list>
<ref bean="search.indexerAndSearcherFactory" />
@@ -80,12 +80,16 @@
<property name="indexQueryLanguage">
<ref bean="search.fts.alfresco.index" />
</property>
<property name="queryConsistency">
<value>${solr.query.fts.queryConsistency}</value>
</property>
<property name="queryConsistency" value="${query.fts.queryConsistency}"/>
<property name="solrQueryConsistency" value="${solr.query.fts.queryConsistency}"/>
<!-- Deprecated -->
<property name="hybridEnabled" value="${query.hybrid.enabled}"/>
<property name="solrHybridEnabled" value="${solr.query.hybrid.enabled}"/>
<property name="searchDao" ref="searchDAO"/>
<property name="hybridEnabled" value="${solr.query.hybrid.enabled}"/>
</bean>
</bean>
<bean id="search.fts.alfresco.db" class="org.alfresco.repo.search.impl.solr.DbAftsQueryLanguage" >
<property name="dictionaryService" ref="dictionaryService" />

View File

@@ -2,9 +2,14 @@ search.solrTrackingSupport.enabled=true
search.solrTrackingSupport.ignorePathsForSpecificTypes=false
search.solrTrackingSupport.ignorePathsForSpecificAspects=false
solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.hybrid.enabled=false
# Deprecated
solr.query.fts.queryConsistency=
solr.query.cmis.queryConsistency=
solr.query.hybrid.enabled=
query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.hybrid.enabled=false
search.solrShardRegistry.purgeOnInit=false
search.solrShardRegistry.shardInstanceTimeoutInSeconds=300

View File

@@ -99,7 +99,11 @@
<ref bean="search.indexerAndSearcherFactory" />
</property>
</bean>
<bean id="search.fts.alfresco.switching" parent="base.search.fts.alfresco.switching" >
<property name="subsystemName" value="noindex"/>
</bean>
<bean id="search.fts.alfresco.index" class="org.alfresco.repo.search.impl.solr.NoIndexQueryLanguage" >
<property name="factories">
<list>

View File

@@ -5,7 +5,19 @@
<import resource="../common-opencmis-context.xml" />
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.cmis.alfresco.switching" parent="base.search.cmis.alfresco.switching" >
<property name="subsystemName" value="noindex"/>
</bean>
<bean id="search.cmis.alfresco.switching1.1" parent="base.search.cmis.alfresco.switching1.1" >
<property name="subsystemName" value="noindex"/>
</bean>
<bean id="search.cmis.strict.switching" parent="base.search.cmis.strict.switching" >
<property name="subsystemName" value="noindex"/>
</bean>
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService" />
</property>
@@ -15,12 +27,12 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching" />
</property>
</bean>
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService1.1" />
</property>
@@ -30,7 +42,7 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching1.1" />
</property>
</bean>

View File

@@ -2,9 +2,14 @@ search.solrTrackingSupport.enabled=true
search.solrTrackingSupport.ignorePathsForSpecificTypes=false
search.solrTrackingSupport.ignorePathsForSpecificAspects=false
solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.hybrid.enabled=false
# Deprecated
solr.query.fts.queryConsistency=
solr.query.cmis.queryConsistency=
solr.query.hybrid.enabled=
query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.hybrid.enabled=false
search.solrShardRegistry.purgeOnInit=false
search.solrShardRegistry.shardInstanceTimeoutInSeconds=300

View File

@@ -4,7 +4,19 @@
<beans>
<import resource="../common-opencmis-context.xml" />
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.cmis.alfresco.switching" parent="base.search.cmis.alfresco.switching" >
<property name="subsystemName" value="solr"/>
</bean>
<bean id="search.cmis.alfresco.switching1.1" parent="base.search.cmis.alfresco.switching1.1" >
<property name="subsystemName" value="solr"/>
</bean>
<bean id="search.cmis.strict.switching" parent="base.search.cmis.strict.switching" >
<property name="subsystemName" value="solr"/>
</bean>
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService" />
</property>
@@ -14,12 +26,12 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching" />
</property>
</bean>
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService1.1" />
</property>
@@ -29,7 +41,7 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching1.1" />
</property>
</bean>

View File

@@ -168,7 +168,11 @@
</property>
</bean>
<bean id="search.fts.alfresco.switching" parent="base.search.fts.alfresco.switching" >
<property name="subsystemName" value="solr"/>
</bean>
<bean id="search.index.alfresco" class="org.alfresco.repo.search.impl.solr.SolrQueryLanguage" >
<property name="factories">
<list>

View File

@@ -2,9 +2,14 @@ search.solrTrackingSupport.enabled=true
search.solrTrackingSupport.ignorePathsForSpecificTypes=false
search.solrTrackingSupport.ignorePathsForSpecificAspects=false
solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.hybrid.enabled=false
# Deprecated
solr.query.fts.queryConsistency=
solr.query.cmis.queryConsistency=
solr.query.hybrid.enabled=
query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.hybrid.enabled=false
search.solrShardRegistry.purgeOnInit=false
search.solrShardRegistry.shardInstanceTimeoutInSeconds=300

View File

@@ -4,7 +4,19 @@
<beans>
<import resource="../common-opencmis-context.xml" />
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.cmis.alfresco.switching" parent="base.search.cmis.alfresco.switching" >
<property name="subsystemName" value="solr4"/>
</bean>
<bean id="search.cmis.alfresco.switching1.1" parent="base.search.cmis.alfresco.switching1.1" >
<property name="subsystemName" value="solr4"/>
</bean>
<bean id="search.cmis.strict.switching" parent="base.search.cmis.strict.switching" >
<property name="subsystemName" value="solr4"/>
</bean>
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService" />
</property>
@@ -14,12 +26,12 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching" />
</property>
</bean>
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService1.1" />
</property>
@@ -29,7 +41,7 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching1.1" />
</property>
</bean>

View File

@@ -218,6 +218,10 @@
</property>
</bean>
<bean id="search.fts.alfresco.switching" parent="base.search.fts.alfresco.switching" >
<property name="subsystemName" value="sol4"/>
</bean>
<bean id="search.fts.alfresco.index" class="org.alfresco.repo.search.impl.solr.SolrQueryLanguage" >
<property name="factories">
<list>

View File

@@ -2,9 +2,14 @@ search.solrTrackingSupport.enabled=true
search.solrTrackingSupport.ignorePathsForSpecificTypes=false
search.solrTrackingSupport.ignorePathsForSpecificAspects=false
solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
solr.query.hybrid.enabled=false
# Deprecated
solr.query.fts.queryConsistency=
solr.query.cmis.queryConsistency=
solr.query.hybrid.enabled=
query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.cmis.queryConsistency=TRANSACTIONAL_IF_POSSIBLE
query.hybrid.enabled=false
search.solrShardRegistry.purgeOnInit=false
search.solrShardRegistry.shardInstanceTimeoutInSeconds=300

View File

@@ -4,7 +4,19 @@
<beans>
<import resource="../common-opencmis-context.xml" />
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.cmis.alfresco.switching" parent="base.search.cmis.alfresco.switching" >
<property name="subsystemName" value="solr6"/>
</bean>
<bean id="search.cmis.alfresco.switching1.1" parent="base.search.cmis.alfresco.switching1.1" >
<property name="subsystemName" value="solr6"/>
</bean>
<bean id="search.cmis.strict.switching" parent="base.search.cmis.strict.switching" >
<property name="subsystemName" value="solr6"/>
</bean>
<bean id="search.OpenCMISQueryService" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService" />
</property>
@@ -14,12 +26,12 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching" />
</property>
</bean>
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.solr.SolrOpenCMISQueryServiceImpl" >
<bean id="search.OpenCMISQueryService1.1" class="org.alfresco.repo.search.impl.OpenCMISQueryServiceImpl" >
<property name="cmisDictionaryService">
<ref bean="OpenCMISDictionaryService1.1" />
</property>
@@ -29,7 +41,7 @@
<property name="alfrescoDictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="solrQueryLanguage">
<property name="queryLanguage">
<ref bean="search.cmis.alfresco.switching1.1" />
</property>
</bean>

View File

@@ -238,6 +238,10 @@
</property>
</bean>
<bean id="search.fts.alfresco.switching" parent="base.search.fts.alfresco.switching" >
<property name="subsystemName" value="solr6"/>
</bean>
<bean id="search.fts.alfresco.index" class="org.alfresco.repo.search.impl.solr.SolrQueryLanguage" >
<property name="factories">
<list>

View File

@@ -25,6 +25,9 @@
*/
package org.alfresco.opencmis;
import static java.time.Duration.of;
import static java.time.temporal.ChronoUnit.MILLIS;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
@@ -61,7 +64,7 @@ import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.FileFilterMode.Client;
import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider;
import org.alfresco.util.testing.category.FrequentlyFailingTests;
import org.alfresco.util.TestHelper;
import org.alfresco.util.testing.category.LuceneTests;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
@@ -77,6 +80,7 @@ import org.apache.chemistry.opencmis.commons.data.ContentStream;
import org.apache.chemistry.opencmis.commons.enums.BaseTypeId;
import org.apache.chemistry.opencmis.commons.enums.BindingType;
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
import org.apache.chemistry.opencmis.commons.exceptions.CmisRuntimeException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisStorageException;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ContentStreamImpl;
import org.apache.chemistry.opencmis.commons.impl.server.AbstractServiceFactory;
@@ -87,6 +91,7 @@ import org.apache.chemistry.opencmis.server.shared.TempStoreOutputStreamFactory;
import org.junit.experimental.categories.Category;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.dao.ConcurrencyFailureException;
/**
* Tests basic local CMIS interaction
@@ -467,7 +472,6 @@ public class OpenCmisLocalTest extends TestCase
* This test would have fit better within CheckOutCheckInServiceImplTest but
* was added here to make use of existing methods
*/
@Category(FrequentlyFailingTests.class) // ACS-962
public void testCancelCheckoutWhileInCheckedOutState()
{
ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
@@ -484,7 +488,7 @@ public class OpenCmisLocalTest extends TestCase
// Set file properties
String docname = "myDoc-" + GUID.generate() + ".txt";
Map<String, String> props = new HashMap<String, String>();
Map<String, String> props = new HashMap<>();
{
props.put(PropertyIds.OBJECT_TYPE_ID, BaseTypeId.CMIS_DOCUMENT.value());
props.put(PropertyIds.NAME, docname);
@@ -501,7 +505,9 @@ public class OpenCmisLocalTest extends TestCase
NodeRef doc1WorkingCopy = cociService.getWorkingCopy(doc1NodeRef);
/* Cancel Checkout */
cociService.cancelCheckout(doc1WorkingCopy);
TestHelper.waitForMethodToFinish(of(100, MILLIS), () ->
cociService.cancelCheckout(doc1WorkingCopy),
CmisRuntimeException.class, ConcurrencyFailureException.class);
/* Check if both the working copy and the document were deleted */
NodeService nodeService = serviceRegistry.getNodeService();

View File

@@ -29,7 +29,6 @@ import static java.time.Duration.of;
import static java.time.temporal.ChronoUnit.MILLIS;
import static java.time.temporal.ChronoUnit.SECONDS;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.AssertionFailedError;
@@ -44,6 +43,7 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.TestHelper;
import org.alfresco.util.testing.category.DBTests;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -549,22 +549,22 @@ public class JobLockServiceTest extends TestCase
if (callback == null) throw new IllegalStateException();
waitForAssertion(of(100, MILLIS), () -> {
TestHelper.waitForMethodToFinish(of(100, MILLIS), () -> {
assertEquals(false,callback.released);
assertEquals(0,callback.getIsActiveCount());
});
}, AssertionFailedError.class);
waitForAssertion(of(1, SECONDS), () -> {
TestHelper.waitForMethodToFinish(of(1, SECONDS), () -> {
assertEquals(false, callback.released);
assertEquals(1, callback.getIsActiveCount());
});
}, AssertionFailedError.class);
callback.isActive = false;
waitForAssertion(of(2, SECONDS), () -> {
TestHelper.waitForMethodToFinish(of(2, SECONDS), () -> {
assertEquals(true, callback.released);
assertEquals(2, callback.getIsActiveCount());
});
}, AssertionFailedError.class);
}
catch (IllegalArgumentException e)
{
@@ -621,43 +621,6 @@ public class JobLockServiceTest extends TestCase
Logger.getLogger("org.alfresco.repo.lock").setLevel(saveLogLevel);
}
}
private static void waitForAssertion(Duration timeout, Runnable assertion)
{
logger.debug("Waiting for assertion to succeed.");
final long lastStep = 10;
final long delayMillis = timeout.toMillis() > lastStep ? timeout.toMillis() / lastStep : 1;
for (int s = 0; s <= lastStep; s++)
{
try
{
assertion.run();
logger.debug("Assertion succeeded.");
return;
}
catch (AssertionFailedError e)
{
if (s == lastStep)
{
logger.debug("Assertion failed. No more waiting.");
throw e;
}
logger.debug("Assertion failed. Waiting until it succeeds.", e);
}
try
{
Thread.sleep(delayMillis);
}
catch (InterruptedException e)
{
Thread.currentThread().interrupt();
fail("Thread has been interrupted.");
}
}
throw new IllegalStateException("Unexpected.");
}
private class TestCallback implements JobLockRefreshCallback
{

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -28,7 +28,7 @@ package org.alfresco.repo.search;
import javax.transaction.UserTransaction;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.search.impl.solr.DisabledFeatureException;
import org.alfresco.repo.search.impl.DisabledFeatureException;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.MutableAuthenticationDao;

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -38,6 +38,8 @@ import java.util.List;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.node.Node;
import org.alfresco.repo.domain.solr.SearchDAO;
import org.alfresco.repo.search.impl.DbOrIndexSwitchingQueryLanguage;
import org.alfresco.repo.search.impl.DisabledFeatureException;
import org.alfresco.repo.search.impl.lucene.LuceneQueryLanguageSPI;
import org.alfresco.repo.search.impl.querymodel.QueryModelException;
import org.alfresco.repo.solr.NodeParameters;

View File

@@ -29,8 +29,13 @@ package org.alfresco.util;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.time.Duration;
import java.util.Arrays;
import java.util.function.Supplier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* A helper class to create a concise test.
*
@@ -39,6 +44,8 @@ import java.util.function.Supplier;
*/
public class TestHelper
{
private static final Log logger = LogFactory.getLog(TestHelper.class);
/**
* Checks the thrown exception is the expected exception.
*
@@ -94,4 +101,59 @@ public class TestHelper
return t;
}
/**
* Waits for <b>{@code method}</b> to succeed within <b>{@code timeout}</b>.
* <p>
* If the method fails because a previous test step has not finished yet,
* it waits and then re-executes the given method.
* This continues until the method succeeds or the <b>{@code timeout}</b> is reached.
*
* @param timeout maximum time to wait.
* @param method the method that is retried.
* @param expectedExceptions the expected exception types that trigger a retry.
* @throws Exception if the method still fails when the timeout is reached.
*/
@SafeVarargs
public static void waitForMethodToFinish(
Duration timeout,
Runnable method,
Class<? extends Throwable> ... expectedExceptions)
{
logger.debug("Waiting for method to succeed.");
final long lastStep = 10;
final long delayMillis = timeout.toMillis() > lastStep ? timeout.toMillis() / lastStep : 1;
for (int step = 0; step <= lastStep; step++)
{
try
{
method.run();
logger.debug("Method succeeded.");
return;
} catch (Throwable e)
{
if(Arrays.stream(expectedExceptions).noneMatch(expEx -> expEx.isInstance(e)))
{
throw e;
}
if (step == lastStep)
{
logger.debug("Method failed - no more waiting.");
throw e;
}
logger.debug("Method failed. Waiting until it succeeds.", e);
}
try
{
Thread.sleep(delayMillis);
} catch (InterruptedException e)
{
Thread.currentThread().interrupt();
fail("Thread has been interrupted.");
}
}
throw new IllegalStateException("Unexpected.");
}
}
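
As a usage note, the new helper replaces the test-local waitForAssertion loop and can wrap any call that only succeeds once an asynchronous step has completed. A hedged sketch based on the JobLockServiceTest usage shown earlier; the asserted condition is illustrative.
// Hedged usage sketch; Duration, TestHelper and AssertionFailedError as imported in the tests above.
TestHelper.waitForMethodToFinish(
        Duration.ofSeconds(2),
        () -> assertEquals(true, callback.released),    // retried until it passes or the timeout elapses
        AssertionFailedError.class);                    // only this exception type triggers a retry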

View File

@@ -7,8 +7,27 @@ pushd "$(dirname "${BASH_SOURCE[0]}")/../../"
source "$(dirname "${BASH_SOURCE[0]}")/build_functions.sh"
# Build the current project
mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true -Pbuild-docker-images -Pags
# Build the current project if needed
if [[ -n ${REQUIRES_INSTALLED_ARTIFACTS} ]] || [[ -n ${REQUIRES_LOCAL_IMAGES} ]] || [[ -n ${BUILD_PROFILES} ]]; then
if [[ -n ${BUILD_PROFILES} ]]; then
PROFILES="${BUILD_PROFILES}"
else
if [[ "${REQUIRES_LOCAL_IMAGES}" == "true" ]]; then
PROFILES="-Pbuild-docker-images -Pags"
else
PROFILES="-Pags"
fi
fi
if [[ "${REQUIRES_INSTALLED_ARTIFACTS}" == "true" ]]; then
PHASE="install"
else
PHASE="package"
fi
mvn -B -V $PHASE -DskipTests -Dmaven.javadoc.skip=true $PROFILES $BUILD_OPTIONS
fi
popd

View File

@@ -1,20 +1,5 @@
#!/usr/bin/env bash
set -ev
rm -rf "${HOME}/.m2/repository/org/alfresco/acs-community-packaging"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-community-repo"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-community-repo-*"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-core"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-data-model"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-enterprise-remote-api"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-enterprise-repo-*"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-enterprise-repository"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-remote-api"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-repository"
rm -rf "${HOME}/.m2/repository/org/alfresco/content-services"
rm -rf "${HOME}/.m2/repository/org/alfresco/content-services*"
rm -rf "${HOME}/.m2/repository/org/alfresco/content-services-community"
rm -rf "${HOME}/.m2/repository/org/alfresco/tas/alfresco-community-repo-*-test"
rm -rf "${HOME}/.m2/repository/org/alfresco/tas/alfresco-enterprise-repo-*-test"
rm -rf "${HOME}/.m2/repository/org/alfresco/alfresco-governance-services*"
find "${HOME}/.m2/repository/" -type d -name "*-SNAPSHOT" | xargs -r -l rm -rf

scripts/travis/prepare.sh (new executable file, 31 lines added)
View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
set -e
M2_REPO_DIR="$HOME/.m2/repository"
M2_REPO_TTL_MINUTES=10080
M2_REPO_EXPIRED="$(find $M2_REPO_DIR -type f -mmin +$M2_REPO_TTL_MINUTES 2>/dev/null | head -n 1 | wc -l)"
M2_REPO_FILE_COUNT="$(find $M2_REPO_DIR -type f 2>/dev/null | wc -l)"
ORG_ALFRESCO_M2_REPO_DIR="$M2_REPO_DIR/org/alfresco"
ORG_ALFRESCO_M2_REPO_TTL_MINUTES=1440
ORG_ALFRESCO_M2_REPO_EXPIRED="$(find $ORG_ALFRESCO_M2_REPO_DIR -type f -mmin +$ORG_ALFRESCO_M2_REPO_TTL_MINUTES 2>/dev/null | head -n 1 | wc -l)"
echo "Files in the maven repo: $M2_REPO_FILE_COUNT"
if [ $ORG_ALFRESCO_M2_REPO_EXPIRED -eq 1 ];then
echo "Invalidating org/alfresco maven local cache."
rm -rf "$ORG_ALFRESCO_M2_REPO_DIR"
fi
if [ $M2_REPO_EXPIRED -eq 1 ];then
echo "Invalidating maven local cache."
rm -rf "$M2_REPO_DIR"
fi
if [ $M2_REPO_FILE_COUNT -lt 1000 ] || [ $ORG_ALFRESCO_M2_REPO_EXPIRED -eq 1 ] || [ $M2_REPO_EXPIRED -eq 1 ];then
echo "Populating maven cache."
export BUILD_PROFILES="-Pall-tas-tests,ags"
export BUILD_OPTIONS="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -Dmaven.artifact.threads=8"
source "$(dirname "${BASH_SOURCE[0]}")/build.sh"
fi

View File

@@ -33,7 +33,11 @@ git status
git --no-pager diff pom.xml
git add pom.xml
if git status --untracked-files=no --porcelain | grep -q '^' ; then
if [[ "${TRAVIS_COMMIT_MESSAGE}" =~ \[force[^\]]*\] ]]; then
FORCE_TOKEN=$(echo "${TRAVIS_COMMIT_MESSAGE}" | sed "s|^.*\(\[force[^]]*\]\).*$|\1|g")
git commit --allow-empty -m "${FORCE_TOKEN} Update upstream version to ${VERSION}"
git push
elif git status --untracked-files=no --porcelain | grep -q '^' ; then
git commit -m "Update upstream version to ${VERSION}"
git push
else