Compare commits

...

114 Commits

Author SHA1 Message Date
alfresco-build
626a97ff45 [maven-release-plugin][skip ci] prepare release 25.1.0.3 2024-11-25 10:35:21 +00:00
Damian Ujma
4a91132226 ACS-5371 Exclude xpp3 dependency to avoid conflict in JDK9+ (#3049)
* ACS-5371 Exclude xpp3 dependency

* ACS-5371 Set compiler compliance to 17

* ACS-5371 Update xmlpull version

* ACS-5371 Update xmlpull version

* ACS-5371 Use kxml2

* ACS-5371 Refactor
2024-11-25 09:49:47 +01:00
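For illustration, the xpp3 exclusion described in the ACS-5371 commit above could look roughly like the following pom.xml fragment. This is a sketch only: the enclosing dependency is hypothetical, and the kxml2 coordinates are assumed rather than taken from the actual change.

<dependency>
    <groupId>org.example</groupId>            <!-- hypothetical dependency that pulls in xpp3 transitively -->
    <artifactId>xml-library</artifactId>
    <version>1.0.0</version>
    <exclusions>
        <exclusion>
            <groupId>xpp3</groupId>           <!-- drop the XmlPull implementation that conflicts on JDK 9+ -->
            <artifactId>xpp3</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>net.sf.kxml</groupId>            <!-- assumed coordinates for the kxml2 replacement -->
    <artifactId>kxml2</artifactId>
    <version>2.3.0</version>
</dependency>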
Piotr Żurek
b6fb8725ce Revert "Bump org.springframework.security:spring-security-bom (#3048)"
This reverts commit aacef0c949.
2024-11-25 09:44:21 +01:00
alfresco-build
5fc0d9edde [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-24 00:10:05 +00:00
alfresco-build
44d394b1a0 [maven-release-plugin][skip ci] prepare release 25.1.0.2 2024-11-24 00:10:03 +00:00
Alfresco CI User
c1d62cca4d [force] Force release for 2024-11-24. 2024-11-24 00:05:36 +00:00
Piotr Żurek
24ae4ce57c Revert "Bump dependency.spring.version from 6.1.14 to 6.2.0 (#3038)"
This reverts commit b8f2ec252d.
2024-11-22 15:07:03 +01:00
dependabot[bot]
b8f2ec252d Bump dependency.spring.version from 6.1.14 to 6.2.0 (#3038)
Bumps `dependency.spring.version` from 6.1.14 to 6.2.0.

Updates `org.springframework:spring-aop` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-beans` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-context` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-context-support` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-core` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-expression` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-jdbc` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-jms` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-test` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-orm` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-tx` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-web` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

Updates `org.springframework:spring-webmvc` from 6.1.14 to 6.2.0
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.14...v6.2.0)

---
updated-dependencies:
- dependency-name: org.springframework:spring-aop
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-beans
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-context
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-context-support
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-core
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-expression
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-jdbc
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-jms
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-test
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-orm
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-tx
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-web
  dependency-type: direct:production
  update-type: version-update:semver-minor
- dependency-name: org.springframework:spring-webmvc
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-22 14:22:19 +01:00
dependabot[bot]
ae721c86f6 Bump commons-io:commons-io from 2.17.0 to 2.18.0 (#3046)
Bumps commons-io:commons-io from 2.17.0 to 2.18.0.

---
updated-dependencies:
- dependency-name: commons-io:commons-io
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-22 14:19:59 +01:00
dependabot[bot]
aacef0c949 Bump org.springframework.security:spring-security-bom (#3048)
Bumps [org.springframework.security:spring-security-bom](https://github.com/spring-projects/spring-security) from 6.3.4 to 6.4.1.
- [Release notes](https://github.com/spring-projects/spring-security/releases)
- [Changelog](https://github.com/spring-projects/spring-security/blob/main/RELEASE.adoc)
- [Commits](https://github.com/spring-projects/spring-security/compare/6.3.4...6.4.1)

---
updated-dependencies:
- dependency-name: org.springframework.security:spring-security-bom
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-22 14:19:15 +01:00
dependabot[bot]
e4edceda75 Bump org.apache.maven.plugins:maven-failsafe-plugin from 3.5.1 to 3.5.2 (#3019) 2024-11-22 12:41:46 +00:00
dependabot[bot]
09ec6688c6 Bump org.quartz-scheduler:quartz from 2.3.2 to 2.5.0 (#3035) 2024-11-22 12:41:36 +00:00
dependabot[bot]
14025f00de Bump org.projectlombok:lombok from 1.18.34 to 1.18.36 (#3042) 2024-11-22 12:41:16 +00:00
dependabot[bot]
22c5a5e134 Bump dependency.bouncycastle.version from 1.78.1 to 1.79 (#3016) 2024-11-22 12:39:42 +00:00
dependabot[bot]
154cad1ff8 Bump org.apache.maven.plugins:maven-javadoc-plugin from 3.10.1 to 3.11.1 (#3021) 2024-11-22 12:39:40 +00:00
dependabot[bot]
7749b89e2b Bump org.apache.santuario:xmlsec from 4.0.2 to 4.0.3 (#3023) 2024-11-22 12:39:35 +00:00
dependabot[bot]
bbb45e667b Bump org.codehaus.groovy:groovy from 3.0.22 to 3.0.23 (#3030) 2024-11-22 12:39:04 +00:00
dependabot[bot]
a5f01e0d94 Bump Alfresco/alfresco-build-tools from 7.1.0 to 8.2.0 (#3018) 2024-11-22 12:39:02 +00:00
dependabot[bot]
0c6d455ec4 Bump com.networknt:json-schema-validator from 1.5.1 to 1.5.3 (#3017) 2024-11-22 12:38:58 +00:00
dependabot[bot]
5e86f5163e Bump org.apache.xmlbeans:xmlbeans from 5.2.1 to 5.2.2 (#3024) 2024-11-22 12:38:23 +00:00
alfresco-build
4981be1564 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-20 15:49:36 +00:00
alfresco-build
9a30fbb75a [maven-release-plugin][skip ci] prepare release 25.1.0.1 2024-11-20 15:49:34 +00:00
Piotr Żurek
9c86fdb8b5 Trigger CI 2024-11-20 15:42:59 +01:00
Kacper Magdziarz
c52f74832d Updating master branch to 25.1.0 after 23.4.0 ACS release [skip ci] 2024-11-20 13:36:19 +01:00
alfresco-build
70dd7642f4 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-19 09:48:20 +00:00
alfresco-build
0cf3eec834 [maven-release-plugin][skip ci] prepare release 23.4.0.67 2024-11-19 09:48:12 +00:00
MohinishSah
dfd855440d bumped aos version 2024-11-19 14:38:12 +05:30
alfresco-build
88d46884c4 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-17 00:08:12 +00:00
alfresco-build
e6444a1c7a [maven-release-plugin][skip ci] prepare release 23.4.0.66 2024-11-17 00:08:10 +00:00
Alfresco CI User
e215038230 [force] Force release for 2024-11-17. 2024-11-17 00:04:44 +00:00
alfresco-build
a5f8034725 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-15 09:54:37 +00:00
alfresco-build
c7dd659268 [maven-release-plugin][skip ci] prepare release 23.4.0.65 2024-11-15 09:54:35 +00:00
Sara
ffae77169e ACS-8937 Bump IE/SS to 2.0.13 (#3040) 2024-11-15 09:12:11 +00:00
alfresco-build
3bcfaf256e [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-14 18:30:28 +00:00
alfresco-build
0e8d56d7b2 [maven-release-plugin][skip ci] prepare release 23.4.0.64 2024-11-14 18:30:25 +00:00
Piotr Żurek
ef9e8d574f ACS-8935 Use GA dependencies (#3037) 2024-11-14 18:47:51 +01:00
alfresco-build
a17712b501 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-14 14:03:54 +00:00
alfresco-build
fce6d1d7eb [maven-release-plugin][skip ci] prepare release 23.4.0.63 2024-11-14 14:03:52 +00:00
mohit-singh4
867af27a96 Fix/apps 2894 node size details automation sample test file (#3036)
* Adding the test case for calculating the NodeSize.

* Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationSampleTestFile] Updating the sample test file which is being used for calculating the NodeSize.

---------

Co-authored-by: kshah <kavit.shah@hyland.com>
Co-authored-by: mohit-singh4 <mohit.singh@contractors.hyland.com>
2024-11-14 18:53:27 +05:30
alfresco-build
3de74456ae [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-13 10:46:53 +00:00
alfresco-build
919880e363 [maven-release-plugin][skip ci] prepare release 23.4.0.62 2024-11-13 10:46:52 +00:00
mohit-singh4
e6ac2875b9 Fix/apps 2894 node size details automation test (#3032)
* Adding the test case for calculating the NodeSize.

* Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

* [fix/APPS-2894_NodeSizeDetails_AutomationTest] Adding the test case for calculating the NodeSize.

---------

Co-authored-by: kshah <kavit.shah@hyland.com>
Co-authored-by: mohit-singh4 <mohit.singh@contractors.hyland.com>
2024-11-13 15:32:54 +05:30
alfresco-build
4250167a28 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-10 00:07:40 +00:00
alfresco-build
312463b5e0 [maven-release-plugin][skip ci] prepare release 23.4.0.61 2024-11-10 00:07:38 +00:00
Alfresco CI User
c09f5f7cb0 [force] Force release for 2024-11-10. 2024-11-10 00:04:32 +00:00
alfresco-build
6284e9724e [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-08 14:28:59 +00:00
alfresco-build
dcf391faea [maven-release-plugin][skip ci] prepare release 23.4.0.60 2024-11-08 14:28:57 +00:00
kavitshah-gl
475f345909 Bump api explorer to 23.4.0-A1 2024-11-08 18:28:44 +05:30
alfresco-build
f42e033e40 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-08 10:48:14 +00:00
alfresco-build
546f37eded [maven-release-plugin][skip ci] prepare release 23.4.0.59 2024-11-08 10:48:12 +00:00
mohit-singh4
a35673d44f [MNT-24127] Added Endpoint To Calculate Folder Size (#2709)
* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [MNT-24127] Added Endpoint to Calculate Size of the folder with Integration Test Cases [ags][tas]

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] EndpointToCalculateFolderSize

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* [feature/MNT-24127] Endpoint Added To Calculate Folder Size

* Some Optimization for NodeSize Calculation.

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added Endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoint to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding performance test case

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoints implementation to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoints implementation to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoints implementation to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Added endpoints implementation to calculate folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Adding endpoints to calculate and retrieve folder size

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* Changes as per the Relationship Api Framework implementation.

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Updated endpoints flow to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments related to calculate and retrieve folder size details

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments and refactoring files

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments and refactoring files

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments and refactoring files

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments and refactoring files

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments regarding Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments regarding Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Addressing review comments

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Creating Integration test cases

* [feature/MNT-24127-EndpointToCalculateFolderSize] Some Changes in NodeSizeDetailsTests

* [feature/MNT-24127-EndpointToCalculateFolderSize] Some Changes in NodeSizeDetailsTests

* [feature/MNT-24127-EndpointToCalculateFolderSize] Some Changes in NodeSizeDetailsTests

* [feature/MNT-24127-EndpointToCalculateFolderSize] Some Changes in NodeSizeDetailsTests

* [feature/MNT-24127-EndpointToCalculateFolderSize] Checking size-details feature for solr6 subsystem

* [feature/MNT-24127-EndpointToCalculateFolderSize] Checking size-details feature for solr6 subsystem

---------

Co-authored-by: kshah <kavit.shah@globallogic.com>
Co-authored-by: mohit-singh4 <mohit.singh@contractors.hyland.com>
Co-authored-by: kshah <kavit.shah@hyland.com>
2024-11-08 15:37:27 +05:30
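For illustration, here is a minimal Java sketch of the two-step flow the commit messages above describe: trigger a folder-size calculation, then retrieve the result. The endpoint paths, the job-id handling and the credentials are assumptions based on the "size-details" name mentioned in the commits, not a confirmed API reference.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class FolderSizeExample
{
    public static void main(String[] args) throws Exception
    {
        // Assumed base URL and credentials for a local repository instance.
        String base = "http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1";
        String auth = "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes());
        String folderId = "<folder-node-id>"; // hypothetical folder node id

        HttpClient client = HttpClient.newHttpClient();

        // Step 1: ask the repository to start calculating the folder size (asynchronous job).
        HttpRequest start = HttpRequest.newBuilder()
                .uri(URI.create(base + "/nodes/" + folderId + "/size-details"))
                .header("Authorization", auth)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        String jobJson = client.send(start, HttpResponse.BodyHandlers.ofString()).body();
        System.out.println("calculation requested: " + jobJson); // expected to contain a job id

        // Step 2: retrieve the calculated size details once the job has completed.
        String jobId = "<job-id-from-step-1>"; // parse from jobJson in real use
        HttpRequest fetch = HttpRequest.newBuilder()
                .uri(URI.create(base + "/nodes/" + folderId + "/size-details/" + jobId))
                .header("Authorization", auth)
                .GET()
                .build();
        System.out.println(client.send(fetch, HttpResponse.BodyHandlers.ofString()).body());
    }
}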
alfresco-build
25e9bdbdb6 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-08 06:37:38 +00:00
alfresco-build
33ab26e52b [maven-release-plugin][skip ci] prepare release 23.4.0.58 2024-11-08 06:37:36 +00:00
rrajoria
7e35abfdc8 Update aos version to 3.2.0-A2 2024-11-08 10:28:04 +05:30
alfresco-build
634e591264 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-07 18:44:42 +00:00
alfresco-build
86f66c86da [maven-release-plugin][skip ci] prepare release 23.4.0.57 2024-11-07 18:44:40 +00:00
Eva Vasques
2904535d40 MNT-24708 - Set messagesCache and loadedBundlesCache as local (#3026)
* In a cluster environment these caches were always invalidating the other nodes' loaded bundles, forcing them to reload
* Only resourceBundlesNamesCache should be invalidating, so that a cache reset can be forced when a bundle is added, removed or reloaded dynamically
* On unregistering a resource bundle, the cache wasn't being explicitly updated, so the cache on the other nodes was not invalidated.
2024-11-07 18:03:52 +00:00
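A minimal sketch of the cache clustering setup this implies, assuming the usual cache.<name>.cluster.type convention from the repository's caches.properties; the exact property keys below are guesses built from the cache names in the commit message, not the real configuration:

# assumed keys, following the cache.<name>.cluster.type pattern
cache.messagesCache.cluster.type=local
cache.loadedBundlesCache.cluster.type=local
cache.resourceBundlesNamesCache.cluster.type=invalidating

With the first two caches local, each cluster node keeps its own loaded bundles, while invalidation of resourceBundlesNamesCache is what signals the other nodes to reload when a bundle is added, removed or reloaded.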
alfresco-build
e99905efbd [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-07 13:09:09 +00:00
alfresco-build
ce65de0f10 [maven-release-plugin][skip ci] prepare release 23.4.0.56 2024-11-07 13:09:07 +00:00
Piotr Żurek
29b19d4245 ACS-8936 Propagate latest Search Service (#3025) 2024-11-07 12:54:06 +01:00
alfresco-build
4eea43bddf [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-05 23:04:52 +00:00
alfresco-build
4c9282790a [maven-release-plugin][skip ci] prepare release 23.4.0.55 2024-11-05 23:04:50 +00:00
Sara
39a74e42b6 ACS-8934 Bump ATS 4.1.5-A3 (#3022) 2024-11-05 18:57:01 +00:00
alfresco-build
a298c0dcb9 [maven-release-plugin][skip ci] prepare for next development iteration 2024-11-03 00:07:36 +00:00
alfresco-build
9ee56a5d08 [maven-release-plugin][skip ci] prepare release 23.4.0.54 2024-11-03 00:07:34 +00:00
Alfresco CI User
6b80a2c39c [force] Force release for 2024-11-03. 2024-11-03 00:04:31 +00:00
alfresco-build
f6569edde1 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-28 13:55:04 +00:00
alfresco-build
5ab9aa8cf2 [maven-release-plugin][skip ci] prepare release 23.4.0.53 2024-10-28 13:55:02 +00:00
Damian Ujma
8a171d09b5 ACS-8915 Propagate latest ATS/AIS (#3012) 2024-10-28 14:15:03 +01:00
alfresco-build
3e423a1543 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-27 00:07:40 +00:00
alfresco-build
6351782c1d [maven-release-plugin][skip ci] prepare release 23.4.0.52 2024-10-27 00:07:38 +00:00
Alfresco CI User
4c92868efb [force] Force release for 2024-10-27. 2024-10-27 00:04:27 +00:00
alfresco-build
2cca9ea11b [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-25 11:06:31 +00:00
alfresco-build
e12001e4d1 [maven-release-plugin][skip ci] prepare release 23.4.0.51 2024-10-25 11:06:29 +00:00
dependabot[bot]
a57607f728 Bump org.springframework:spring-context from 6.1.13 to 6.1.14 (#3002)
Bumps [org.springframework:spring-context](https://github.com/spring-projects/spring-framework) from 6.1.13 to 6.1.14.
- [Release notes](https://github.com/spring-projects/spring-framework/releases)
- [Commits](https://github.com/spring-projects/spring-framework/compare/v6.1.13...v6.1.14)

---
updated-dependencies:
- dependency-name: org.springframework:spring-context
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:49:56 +02:00
dependabot[bot]
a79af2cac0 Bump org.springframework.security:spring-security-bom (#3004)
Bumps [org.springframework.security:spring-security-bom](https://github.com/spring-projects/spring-security) from 6.3.3 to 6.3.4.
- [Release notes](https://github.com/spring-projects/spring-security/releases)
- [Changelog](https://github.com/spring-projects/spring-security/blob/main/RELEASE.adoc)
- [Commits](https://github.com/spring-projects/spring-security/compare/6.3.3...6.3.4)

---
updated-dependencies:
- dependency-name: org.springframework.security:spring-security-bom
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:14:37 +02:00
alfresco-build
be807c5b19 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-24 11:58:56 +00:00
alfresco-build
ae03e7076e [maven-release-plugin][skip ci] prepare release 23.4.0.50 2024-10-24 11:58:54 +00:00
Damian Ujma
26e394c398 ACS-6670 Change nodesSharedCache to fully-distributed (#3007) 2024-10-24 13:00:04 +02:00
alfresco-build
200aa95784 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-20 00:07:31 +00:00
alfresco-build
4eeabb3dbd [maven-release-plugin][skip ci] prepare release 23.4.0.49 2024-10-20 00:07:29 +00:00
Alfresco CI User
227bbe4fd8 [force] Force release for 2024-10-20. 2024-10-20 00:04:38 +00:00
alfresco-build
1461a04a3d [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-16 13:08:10 +00:00
alfresco-build
52008dc139 [maven-release-plugin][skip ci] prepare release 23.4.0.48 2024-10-16 13:08:09 +00:00
rrajoria
f2a10052e4 Bump aos version 3.2.0-A1 (#2999) 2024-10-16 17:57:11 +05:30
alfresco-build
add64e0cb6 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-13 00:07:36 +00:00
alfresco-build
14511e2621 [maven-release-plugin][skip ci] prepare release 23.4.0.47 2024-10-13 00:07:34 +00:00
Alfresco CI User
42e0c93121 [force] Force release for 2024-10-13. 2024-10-13 00:04:33 +00:00
alfresco-build
715bc273ee [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-11 12:42:44 +00:00
alfresco-build
812541870e [maven-release-plugin][skip ci] prepare release 23.4.0.46 2024-10-11 12:42:39 +00:00
dependabot[bot]
9aa5051826 Bump commons-io:commons-io from 2.16.1 to 2.17.0 (#2928)
Bumps commons-io:commons-io from 2.16.1 to 2.17.0.

---
updated-dependencies:
- dependency-name: commons-io:commons-io
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-11 14:00:02 +02:00
alfresco-build
54580b4aeb [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-10 09:55:40 +00:00
alfresco-build
2b1b6091a3 [maven-release-plugin][skip ci] prepare release 23.4.0.45 2024-10-10 09:55:37 +00:00
Aleksandra Onych
74a147ab3f [ACS-8862] Bump Keycloak to 25.0.6 (#2983) 2024-10-10 11:13:08 +02:00
alfresco-build
07f0595f5a [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-09 16:57:03 +00:00
alfresco-build
e3422ea6a5 [maven-release-plugin][skip ci] prepare release 23.4.0.44 2024-10-09 16:57:00 +00:00
Eva Vasques
f4103c242f MNT-24641 Avoid duplicate key error on content upload (#2984)
MNT-24641
* On createOrGetByValue in EntityLookupCache, also cache by value
* Created getCachedEntityByValue, which attempts to retrieve the value from the cache only
* On attempting to create a content URL, first check the cache before creating it in the database, avoiding a duplicate key
2024-10-09 17:07:10 +01:00
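For illustration, a simplified, generic Java sketch of the "check the value cache before inserting" pattern described above; this is not the actual EntityLookupCache code, just the shape of the fix under those assumptions.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public class ValueFirstLookupCache<K, V>
{
    private final Map<V, K> keyByValue = new ConcurrentHashMap<>(); // value -> key cache
    private final Function<V, K> databaseInsert;                    // inserts the row, returns its key

    public ValueFirstLookupCache(Function<V, K> databaseInsert)
    {
        this.databaseInsert = databaseInsert;
    }

    /** Roughly analogous to getCachedEntityByValue: consult only the cache, never the database. */
    public K getCachedKeyByValue(V value)
    {
        return keyByValue.get(value);
    }

    /** Roughly analogous to createOrGetByValue: reuse a cached entry instead of re-inserting,
     *  which is what avoids the duplicate-key error on concurrent content uploads. */
    public K createOrGetByValue(V value)
    {
        K cached = keyByValue.get(value);
        if (cached != null)
        {
            return cached;
        }
        K created = databaseInsert.apply(value);
        keyByValue.put(value, created); // also cache by value, as the commit describes
        return created;
    }
}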
alfresco-build
34fb5e9dd9 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-09 07:03:55 +00:00
alfresco-build
f6cf0670c1 [maven-release-plugin][skip ci] prepare release 23.4.0.43 2024-10-09 07:03:53 +00:00
rrajoria
c7bd036030 Update aos version (#2982)
Update aos version to 4.0.0-A1 with Spring 6.1 upgrade
2024-10-09 11:53:51 +05:30
alfresco-build
b20c573040 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-08 15:11:21 +00:00
alfresco-build
6568885c10 [maven-release-plugin][skip ci] prepare release 23.4.0.42 2024-10-08 15:11:17 +00:00
dependabot[bot]
31237135c5 Bump org.apache.maven.plugins:maven-failsafe-plugin from 3.5.0 to 3.5.1 (#2979)
Bumps [org.apache.maven.plugins:maven-failsafe-plugin](https://github.com/apache/maven-surefire) from 3.5.0 to 3.5.1.
- [Release notes](https://github.com/apache/maven-surefire/releases)
- [Commits](https://github.com/apache/maven-surefire/compare/surefire-3.5.0...surefire-3.5.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-failsafe-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-08 16:30:28 +02:00
alfresco-build
d528ed1e97 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-06 00:07:53 +00:00
alfresco-build
bb207340fd [maven-release-plugin][skip ci] prepare release 23.4.0.41 2024-10-06 00:07:51 +00:00
Alfresco CI User
314e1aeb64 [force] Force release for 2024-10-06. 2024-10-06 00:04:26 +00:00
alfresco-build
9846f7b04f [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-04 18:40:05 +00:00
alfresco-build
6e442e93b8 [maven-release-plugin][skip ci] prepare release 23.4.0.40 2024-10-04 18:40:03 +00:00
Tom Page
fb3c57aab4 Merge pull request #2969 from Alfresco/feature/MNT-24637_IncludeAspectNames
MNT-24637 Add include=aspectNames to favourites API.
2024-10-04 18:59:35 +01:00
Tom Page
093b3281fb MNT-24637 PMD fixes. 2024-10-04 15:54:57 +01:00
Tom Page
3b027c6c36 MNT-24637 Include aspectNames in TAS model. 2024-10-04 15:45:54 +01:00
Tom Page
f193309e4c MNT-24637 Add include=aspectNames to favourites API. 2024-10-04 15:22:38 +01:00
Tom Page
7668849a59 MNT-24637 Pre-commit formatting. 2024-10-04 15:21:53 +01:00
alfresco-build
1350e68c29 [maven-release-plugin][skip ci] prepare for next development iteration 2024-10-04 13:13:41 +00:00
61 changed files with 5687 additions and 3332 deletions

@@ -44,14 +44,14 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- id: changed-files
uses: Alfresco/alfresco-build-tools/.github/actions/github-list-changes@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/github-list-changes@v8.2.0
with:
write-list-to-env: true
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare maven cache and check compilation"
@@ -69,12 +69,12 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v8.2.0
continue-on-error: true
with:
srcclr-api-token: ${{ secrets.SRCCLR_API_TOKEN }}
@@ -92,10 +92,10 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/github-download-file@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/github-download-file@v8.2.0
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
repository: "Alfresco/veracode-baseline-archive"
@@ -142,9 +142,9 @@ jobs:
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- uses: Alfresco/ya-pmd-scan@v4.1.0
with:
classpath-build-command: "mvn test-compile -ntp -Pags -pl \"-:alfresco-community-repo-docker\""
@@ -175,14 +175,14 @@ jobs:
testAttributes: "-Dtest=AllMmtUnitTestSuite"
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testModule }}
@@ -213,7 +213,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -255,9 +255,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -270,7 +270,7 @@ jobs:
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testSuite }}
@@ -301,7 +301,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -334,9 +334,9 @@ jobs:
version: ['10.2.18', '10.4', '10.5']
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: Run MariaDB ${{ matrix.version }} database
@@ -345,7 +345,7 @@ jobs:
MARIADB_VERSION: ${{ matrix.version }}
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.version }}
@@ -376,7 +376,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -405,9 +405,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.6 database"
@@ -416,7 +416,7 @@ jobs:
MARIADB_VERSION: 10.6
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -447,7 +447,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -476,9 +476,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MySQL 8 database"
@@ -487,7 +487,7 @@ jobs:
MYSQL_VERSION: 8
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -518,7 +518,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -546,9 +546,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 13.12 database"
@@ -557,7 +557,7 @@ jobs:
POSTGRES_VERSION: 13.12
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -588,7 +588,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -616,9 +616,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.9 database"
@@ -627,7 +627,7 @@ jobs:
POSTGRES_VERSION: 14.9
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -658,7 +658,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -686,9 +686,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 15.4 database"
@@ -697,7 +697,7 @@ jobs:
POSTGRES_VERSION: 15.4
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -728,7 +728,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -754,16 +754,16 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run ActiveMQ"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile activemq up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -794,7 +794,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -854,9 +854,9 @@ jobs:
mvn-options: '-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore'
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Set transformers tag"
@@ -879,7 +879,7 @@ jobs:
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testSuite }} ${{ matrix.idp }}
@@ -910,7 +910,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -968,9 +968,9 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -986,7 +986,7 @@ jobs:
run: mvn install -pl :alfresco-community-repo-integration-test -am -DskipTests -Pall-tas-tests
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.test-name }}
@@ -1024,7 +1024,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.tests.outcome }}
@@ -1050,16 +1050,16 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 15.4 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile postgres up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -1090,7 +1090,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -1124,9 +1124,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1134,7 +1134,7 @@ jobs:
bash ./scripts/ci/build.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} 0${{ matrix.part }} - (PostgreSQL) ${{ matrix.test-name }}
@@ -1170,9 +1170,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1180,7 +1180,7 @@ jobs:
bash ./scripts/ci/build.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} 0${{ matrix.part }} - (MySQL) ${{ matrix.test-name }}
@@ -1212,9 +1212,9 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1228,7 +1228,7 @@ jobs:
mvn -B install -pl :alfresco-governance-services-automation-community-rest-api -am -Pags -Pall-tas-tests -DskipTests
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -1260,7 +1260,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v7.1.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -1302,9 +1302,9 @@ jobs:
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |


@@ -34,12 +34,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.2.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -63,12 +63,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v7.1.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.2.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}


@@ -731,7 +731,7 @@
"filename": "remote-api/src/test/java/org/alfresco/rest/api/tests/AbstractBaseApiTest.java",
"hashed_secret": "d033e22ae348aeb5660fc2140aec35850c4da997",
"is_verified": false,
"line_number": 120,
"line_number": 111,
"is_secret": false
}
],
@@ -1888,5 +1888,5 @@
}
]
},
"generated_at": "2024-10-02T10:18:47Z"
"generated_at": "2024-10-09T09:32:52Z"
}


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<build>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.12
SOLR6_TAG=2.0.13
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>


@@ -3,8 +3,8 @@
#
# Version label
version.major=23
version.minor=4
version.major=25
version.minor=1
version.revision=0
version.label=


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<build>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<dependencies>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>
@@ -118,6 +118,18 @@
<groupId>org.jibx</groupId>
<artifactId>jibx-run</artifactId>
<version>1.4.2</version>
<exclusions>
<!-- [ACS-5371] Excluded to avoid conflict in JDK9+ as it includes javax.xml-->
<exclusion>
<groupId>xpp3</groupId>
<artifactId>xpp3</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>net.sf.kxml</groupId>
<artifactId>kxml2</artifactId>
<version>${dependency.kxml2.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<dependencies>


@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
</project>


@@ -146,6 +146,7 @@ libgif http://giflib.sourceforge.net/
libfreetype http://www.freetype.org/
PostgreSQL http://www.postgresql.org/
PostgreSQL JDBC Driver http://www.postgresql.org/
kXML 2 http://kxml.sourceforge.net/
=== CDDL 1.0 ===


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.12
SOLR6_TAG=2.0.13
POSTGRES_TAG=15.4
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8


@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<organization>


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<developers>


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<developers>


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>
@@ -171,7 +171,7 @@
<dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy</artifactId>
<version>3.0.22</version>
<version>3.0.23</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.codehaus.groovy/groovy-json-->


@@ -40,18 +40,18 @@ public class RestPersonFavoritesModel extends TestModel implements IRestModel<Re
@Override
public RestPersonFavoritesModel onModel()
{
return model;
return model;
}
private String targetGuid;
private String createdAt;
private List<String> aspectNames;
private List<String> allowableOperations;
private RestTargetModel target;
public RestPersonFavoritesModel()
{
}
{}
public RestPersonFavoritesModel(String targetGuid, String createdAt)
{
@@ -90,11 +90,23 @@ public class RestPersonFavoritesModel extends TestModel implements IRestModel<Re
this.createdAt = createdAt;
}
public List<String> getAllowableOperations() {
public List<String> getAspectNames()
{
return aspectNames;
}
public void setAspectNames(List<String> aspectNames)
{
this.aspectNames = aspectNames;
}
public List<String> getAllowableOperations()
{
return allowableOperations;
}
public void setAllowableOperations(List<String> allowableOperations) {
public void setAllowableOperations(List<String> allowableOperations)
{
this.allowableOperations = allowableOperations;
}
}


@@ -0,0 +1,141 @@
/*-
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.model;
import java.util.Date;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.alfresco.rest.core.IRestModel;
import org.alfresco.utility.model.TestModel;
public class RestSizeDetailsModel extends TestModel implements IRestModel<RestSizeDetailsModel>
{
@JsonProperty(value = "entry")
RestSizeDetailsModel model;
private String id;
private Long sizeInBytes;
private Date calculatedAt;
private Integer numberOfFiles;
private String jobId;
private STATUS status;
public enum STATUS
{
NOT_INITIATED, PENDING, IN_PROGRESS, COMPLETED, FAILED
}
public String getId()
{
return id;
}
public void setId(String id)
{
this.id = id;
}
public Long getSizeInBytes()
{
return sizeInBytes;
}
public void setSizeInBytes(Long sizeInBytes)
{
this.sizeInBytes = sizeInBytes;
}
public Date getCalculatedAt()
{
return calculatedAt;
}
public void setCalculatedAt(Date calculatedAt)
{
this.calculatedAt = calculatedAt;
}
public Integer getNumberOfFiles()
{
return numberOfFiles;
}
public void setNumberOfFiles(Integer numberOfFiles)
{
this.numberOfFiles = numberOfFiles;
}
public String getJobId()
{
return jobId;
}
public void setJobId(String jobId)
{
this.jobId = jobId;
}
public STATUS getStatus()
{
return status;
}
public void setStatus(STATUS status)
{
this.status = status;
}
@Override
public RestSizeDetailsModel onModel()
{
return model;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
RestSizeDetailsModel that = (RestSizeDetailsModel) o;
return Objects.equals(id, that.id) && Objects.equals(sizeInBytes, that.sizeInBytes) && Objects.equals(
calculatedAt, that.calculatedAt) && Objects.equals(numberOfFiles, that.numberOfFiles)
&& Objects.equals(jobId, that.jobId) && status == that.status;
}
@Override
public int hashCode()
{
return Objects.hash(id, sizeInBytes, calculatedAt, numberOfFiles, jobId, status);
}
}


@@ -2,7 +2,7 @@
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -26,59 +26,34 @@
package org.alfresco.rest.requests;
import static org.alfresco.rest.core.JsonBodyGenerator.arrayToJson;
import static org.alfresco.rest.requests.RuleSettings.IS_INHERITANCE_ENABLED;
import static org.springframework.http.HttpMethod.PUT;
import jakarta.json.JsonArrayBuilder;
import static org.alfresco.rest.core.JsonBodyGenerator.arrayToJson;
import static org.alfresco.rest.requests.RuleSettings.IS_INHERITANCE_ENABLED;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.stream.Stream;
import jakarta.json.JsonArrayBuilder;
import io.restassured.http.ContentType;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.testng.reporters.Files;
import org.alfresco.rest.core.JsonBodyGenerator;
import org.alfresco.rest.core.RestRequest;
import org.alfresco.rest.core.RestResponse;
import org.alfresco.rest.core.RestWrapper;
import org.alfresco.rest.exception.JsonToModelConversionException;
import org.alfresco.rest.model.RestActionDefinitionModelsCollection;
import org.alfresco.rest.model.RestCategoryLinkBodyModel;
import org.alfresco.rest.model.RestCategoryModel;
import org.alfresco.rest.model.RestCategoryModelsCollection;
import org.alfresco.rest.model.RestCommentModel;
import org.alfresco.rest.model.RestCommentModelsCollection;
import org.alfresco.rest.model.RestNodeAssocTargetModel;
import org.alfresco.rest.model.RestNodeAssociationModel;
import org.alfresco.rest.model.RestNodeAssociationModelCollection;
import org.alfresco.rest.model.RestNodeAssociationTypeModel;
import org.alfresco.rest.model.RestNodeBodyModel;
import org.alfresco.rest.model.RestNodeBodyMoveCopyModel;
import org.alfresco.rest.model.RestNodeChildAssocModelCollection;
import org.alfresco.rest.model.RestNodeChildAssociationModel;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.model.RestNodeModelsCollection;
import org.alfresco.rest.model.RestRatingModel;
import org.alfresco.rest.model.RestRatingModelsCollection;
import org.alfresco.rest.model.RestRenditionInfoModel;
import org.alfresco.rest.model.RestRenditionInfoModelCollection;
import org.alfresco.rest.model.RestRuleExecutionModel;
import org.alfresco.rest.model.RestRuleSetLinkModel;
import org.alfresco.rest.model.RestRuleSetModel;
import org.alfresco.rest.model.RestRuleSetModelsCollection;
import org.alfresco.rest.model.RestTagModel;
import org.alfresco.rest.model.RestTagModelsCollection;
import org.alfresco.rest.model.RestVersionModel;
import org.alfresco.rest.model.RestVersionModelsCollection;
import org.alfresco.rest.model.*;
import org.alfresco.rest.model.body.RestNodeLockBodyModel;
import org.alfresco.rest.model.builder.NodesBuilder;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.RepoTestModel;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.testng.reporters.Files;
/**
* Declares all Rest API under the /nodes path
@@ -294,6 +269,7 @@ public class Node extends ModelRequest<Node>
/**
*
* Get fivestar rating of a document using GET call on "nodes/{nodeId}/ratings/{ratingId}"
*
* @return
*/
public RestRatingModel getFiveStarRating()
@@ -336,7 +312,6 @@ public class Node extends ModelRequest<Node>
return restWrapper.processModels(RestTagModelsCollection.class, request);
}
/**
* Deletes a tag for a specific content node using DELETE call on nodes/{nodeId}/tags/{tagId}
*
@@ -381,6 +356,7 @@ public class Node extends ModelRequest<Node>
* You need to specify first the multipart call {@link RestWrapper#usingMultipartFile(java.io.File)}
*
* <code>usingMultipartFile(new File("your-local-file.txt")).withCoreAPI().usingNode(ContentModel.my()).createNode();</code>
*
* @return
*/
public RestNodeModel createNode()
@@ -415,7 +391,8 @@ public class Node extends ModelRequest<Node>
/**
* Create node rendition using POST call on '/nodes/{nodeId}/renditions'
*
* @param renditionId id of rendition to be created
* @param renditionId
* id of rendition to be created
* @return
*/
public void createNodeRendition(String renditionId)
@@ -428,8 +405,10 @@ public class Node extends ModelRequest<Node>
/**
* Create node version rendition using POST call on '/nodes/{nodeId}/versions/{versionId}/renditions'
*
* @param renditionId id of rendition to be created
* @param versionId version id of node
* @param renditionId
* id of rendition to be created
* @param versionId
* version id of node
* @return
*/
public void createNodeVersionRendition(String renditionId, String versionId)
@@ -442,10 +421,10 @@ public class Node extends ModelRequest<Node>
}
/**
* Check if specified rendition exists and if not
* create node rendition using POST call on '/nodes/{nodeId}/renditions'
* Check if specified rendition exists and if not create node rendition using POST call on '/nodes/{nodeId}/renditions'
*
* @param renditionId id of rendition to be created
* @param renditionId
* id of rendition to be created
* @return
*/
public void createNodeRenditionIfNotExists(String renditionId)
@@ -460,9 +439,10 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node rendition using GET call on '/nodes/{nodeId}/renditions/{renditionId}
* Get node rendition using GET call on '/nodes/{nodeId}/renditions/{renditionId}
*
* @param renditionId id of rendition to be retrieved
* @param renditionId
* id of rendition to be retrieved
* @return
*/
public RestRenditionInfoModel getNodeRendition(String renditionId)
@@ -474,8 +454,10 @@ public class Node extends ModelRequest<Node>
/**
* Get node version rendition using GET call on '/nodes/{nodeId}/versions/{versionId}renditions/{renditionId}
*
* @param renditionId id of rendition to be retrieved
* @param versionId versionId of the node
* @param renditionId
* id of rendition to be retrieved
* @param versionId
* versionId of the node
* @return
*/
public RestRenditionInfoModel getNodeVersionRendition(String renditionId, String versionId)
@@ -487,8 +469,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node rendition using GET call on 'nodes/{nodeId}/renditions/{renditionId} Please note that it retries to get
* the renditions response several times because on the alfresco server the rendition can take a while to be created.
* Get node rendition using GET call on 'nodes/{nodeId}/renditions/{renditionId} Please note that it retries to get the renditions response several times because on the alfresco server the rendition can take a while to be created.
*
* @return
*/
@@ -510,8 +491,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node version rendition using GET call on 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId} Please note that it retries to get
* the renditions response several times because on the alfresco server the rendition can take a while to be created.
* Get node version rendition using GET call on 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId} Please note that it retries to get the renditions response several times because on the alfresco server the rendition can take a while to be created.
*
* @return
*/
@@ -533,10 +513,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node rendition content using GET call on
* 'nodes/{nodeId}/renditions/{renditionId}/content Please note that it
* retries to get the renditions response several times because on the
* alfresco server the rendition can take a while to be created.
* Get node rendition content using GET call on 'nodes/{nodeId}/renditions/{renditionId}/content Please note that it retries to get the renditions response several times because on the alfresco server the rendition can take a while to be created.
*
* @return
*/
@@ -546,7 +523,7 @@ public class Node extends ModelRequest<Node>
renditionId);
RestResponse response = restWrapper.process(request);
int retry = 0;
//Multiplied by '8' because AI rendition test cases need more time (~30 seconds) - see ACS-2158
// Multiplied by '8' because AI rendition test cases need more time (~30 seconds) - see ACS-2158
while (!Integer.valueOf(response.getStatusCode()).equals(HttpStatus.OK.value()) && retry < (8 * Utility.retryCountSeconds))
{
Utility.waitToLoopTime(1);
@@ -558,10 +535,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node version rendition content using GET call on
* 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId}/content Please note that it
* retries to get the renditions response several times because on the
* alfresco server the rendition can take a while to be created.
* Get node version rendition content using GET call on 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId}/content Please note that it retries to get the renditions response several times because on the alfresco server the rendition can take a while to be created.
*
* @return
*/
@@ -582,8 +556,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node rendition content using GET call on
* 'nodes/{nodeId}/renditions/{renditionId}/content
* Get node rendition content using GET call on 'nodes/{nodeId}/renditions/{renditionId}/content
*
* @return
*/
@@ -595,8 +568,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Get node version rendition content using GET call on
* 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId}/content
* Get node version rendition content using GET call on 'nodes/{nodeId}/versions/{versionId}/renditions/{renditionId}/content
*
* @return
*/
@@ -608,8 +580,8 @@ public class Node extends ModelRequest<Node>
}
/**
* Get rendition information for available renditions for the node using GET call on
* 'nodes/{nodeId}/renditions'
* Get rendition information for available renditions for the node using GET call on 'nodes/{nodeId}/renditions'
*
* @return
*/
public RestRenditionInfoModelCollection getNodeRenditionsInfo()
@@ -620,8 +592,8 @@ public class Node extends ModelRequest<Node>
}
/**
* Get rendition information for available renditions for the node version using GET call on
* 'nodes/{nodeId}/versions/{versionId}/renditions'
* Get rendition information for available renditions for the node version using GET call on 'nodes/{nodeId}/versions/{versionId}/renditions'
*
* @return
*/
public RestRenditionInfoModelCollection getNodeVersionRenditionsInfo(String versionId)
@@ -631,11 +603,11 @@ public class Node extends ModelRequest<Node>
return restWrapper.processModels(RestRenditionInfoModelCollection.class, request);
}
/**
* Delete the rendition identified by renditionId using DELETE call on "/nodes/{nodeId}/renditions/{renditionId}"
*
* @param renditionId id of rendition to delete
* @param renditionId
* id of rendition to delete
*/
public void deleteNodeRendition(String renditionId)
{
@@ -657,7 +629,8 @@ public class Node extends ModelRequest<Node>
/**
* Move a node to a target folder
*
* @param moveBody a {@link RestNodeBodyMoveCopyModel} containing at least the target parent id
* @param moveBody
* a {@link RestNodeBodyMoveCopyModel} containing at least the target parent id
* @return the moved node's new information
*/
public RestNodeModel move(RestNodeBodyMoveCopyModel moveBody)
@@ -669,7 +642,8 @@ public class Node extends ModelRequest<Node>
/**
* Copy a node to a target folder
*
* @param copyBody a {@link RestNodeBodyMoveCopyModel} containing at least the target parent id
* @param copyBody
* a {@link RestNodeBodyMoveCopyModel} containing at least the target parent id
* @return the moved node's new information
*/
public RestNodeModel copy(RestNodeBodyMoveCopyModel copyBody)
@@ -679,7 +653,6 @@ public class Node extends ModelRequest<Node>
return restWrapper.processModel(RestNodeModel.class, request);
}
/**
* Lock a specific node using POST call on "nodes/{nodeId}/lock"
*
@@ -747,8 +720,7 @@ public class Node extends ModelRequest<Node>
}
/**
* Delete a target for a specific node using DELETE call on
* nodes/{nodeId}/targets/{targetId}
* Delete a target for a specific node using DELETE call on nodes/{nodeId}/targets/{targetId}
*
* @param target
*/
@@ -830,7 +802,8 @@ public class Node extends ModelRequest<Node>
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChild - node, which should become a secondary child
* @param secondaryChild
* - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(RepoTestModel secondaryChild)
@@ -841,8 +814,10 @@ public class Node extends ModelRequest<Node>
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should become a secondary child
* @param associationType
* - type of secondary parent-child relationship association
* @param secondaryChild
* - node, which should become a secondary child
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(String associationType, RepoTestModel secondaryChild)
@@ -853,7 +828,8 @@ public class Node extends ModelRequest<Node>
/**
* Creates a secondary child association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildAssociation - node's secondary parent-child association model
* @param secondaryChildAssociation
* - node's secondary parent-child association model
* @return a node's parent-child association
*/
public RestNodeChildAssociationModel addSecondaryChild(RestNodeChildAssociationModel secondaryChildAssociation)
@@ -865,7 +841,8 @@ public class Node extends ModelRequest<Node>
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildren - nodes, which should become secondary children
* @param secondaryChildren
* - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RepoTestModel... secondaryChildren)
@@ -876,21 +853,24 @@ public class Node extends ModelRequest<Node>
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChildren - nodes, which should become secondary children
* @param associationType
* - type of secondary parent-child relationship association
* @param secondaryChildren
* - nodes, which should become secondary children
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(String associationType, RepoTestModel... secondaryChildren)
{
return addSecondaryChildren(Stream.of(secondaryChildren)
.map(child -> new RestNodeChildAssociationModel(child.getNodeRef(), associationType))
.toArray(RestNodeChildAssociationModel[]::new));
.map(child -> new RestNodeChildAssociationModel(child.getNodeRef(), associationType))
.toArray(RestNodeChildAssociationModel[]::new));
}
/**
* Creates a secondary children association using POST call to: 'nodes/{nodeId}/secondary-children'.
*
* @param secondaryChildrenAssociations - node's secondary parent-child association models
* @param secondaryChildrenAssociations
* - node's secondary parent-child association models
* @return a collection of node's parent-child associations
*/
public RestNodeChildAssocModelCollection addSecondaryChildren(RestNodeChildAssociationModel... secondaryChildrenAssociations)
@@ -903,7 +883,8 @@ public class Node extends ModelRequest<Node>
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param secondaryChild - node, which should NOT be a secondary child anymore
* @param secondaryChild
* - node, which should NOT be a secondary child anymore
*/
public void removeSecondaryChild(RepoTestModel secondaryChild)
{
@@ -913,8 +894,10 @@ public class Node extends ModelRequest<Node>
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param associationType - type of secondary parent-child relationship association
* @param secondaryChild - node, which should NOT be a secondary child anymore
* @param associationType
* - type of secondary parent-child relationship association
* @param secondaryChild
* - node, which should NOT be a secondary child anymore
*/
public void removeSecondaryChild(String associationType, RepoTestModel secondaryChild)
{
@@ -932,13 +915,12 @@ public class Node extends ModelRequest<Node>
/**
* Removes secondary child association using DELETE call 'nodes/{nodeId}/secondary-children/{childId}'.
*
* @param secondaryChildAssociation - node's secondary parent-child association to remove
* @param secondaryChildAssociation
* - node's secondary parent-child association to remove
*/
public void removeSecondaryChild(RestNodeAssociationModel secondaryChildAssociation)
{
String parameters = StringUtils.isNotEmpty(secondaryChildAssociation.getAssociation().getAssocType()) ?
"assocType=" + secondaryChildAssociation.getAssociation().getAssocType() + "&" + restWrapper.getParameters() :
restWrapper.getParameters();
String parameters = StringUtils.isNotEmpty(secondaryChildAssociation.getAssociation().getAssocType()) ? "assocType=" + secondaryChildAssociation.getAssociation().getAssocType() + "&" + restWrapper.getParameters() : restWrapper.getParameters();
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/secondary-children/{childId}?{parameters}", repoModel.getNodeRef(), secondaryChildAssociation.getId(), parameters);
restWrapper.processEmptyModel(request);
}
@@ -1013,7 +995,6 @@ public class Node extends ModelRequest<Node>
deleteNode(nodeModel.getId());
}
/**
* Delete a specific node using DELETE call on nodes/{nodeId}
*
@@ -1035,6 +1016,7 @@ public class Node extends ModelRequest<Node>
/**
* Get Direct Access URL for a node
*
* @param postBody
* @return
*/
@@ -1055,6 +1037,7 @@ public class Node extends ModelRequest<Node>
/**
* Get Direct Access URL for a specific node rendition E.g "pdf"
*
* @param renditionId
* @return
*/
@@ -1067,6 +1050,7 @@ public class Node extends ModelRequest<Node>
/**
* Get Direct Access URL for a specific node version. E.g "1.1"
*
* @param versionId
* @return
*/
@@ -1079,6 +1063,7 @@ public class Node extends ModelRequest<Node>
/**
* Get Direct Access URL for a specific node version rendition. E.g ("1.1", "pdf")
*
* @param versionId
* @param renditionId
* @return
@@ -1132,7 +1117,8 @@ public class Node extends ModelRequest<Node>
/**
* Get the specified rule set from a folder.
*
* @param ruleSetId The id of the rule set.
* @param ruleSetId
* The id of the rule set.
* @return The specified rule set.
*/
public RestRuleSetModel getRuleSet(String ruleSetId)
@@ -1145,7 +1131,8 @@ public class Node extends ModelRequest<Node>
/**
* Update a rule set on this folder - for example to reorder the rules.
*
* @param ruleSet The updated rule set.
* @param ruleSet
* The updated rule set.
* @return The updated rule set returned by the server.
*/
public RestRuleSetModel updateRuleSet(RestRuleSetModel ruleSet)
@@ -1188,7 +1175,8 @@ public class Node extends ModelRequest<Node>
/**
* Try to delete a ruleset link performing a DELETE call on "/nodes/{folderNodeId}/rule-set-links/{rulesetId}"
*
* @param ruleSetId the id of the ruleset to be unlinked from the folder
* @param ruleSetId
* the id of the ruleset to be unlinked from the folder
* @return
*/
public void unlinkRuleSet(String ruleSetId)
@@ -1200,7 +1188,8 @@ public class Node extends ModelRequest<Node>
/**
* Trigger rules on a folder performing POST call on "/nodes/{folderNodeId}/rule-executions"
*
* @param body - rules execution request
* @param body
* - rules execution request
* @return execution result
*/
public RestRuleExecutionModel executeRules(RestRuleExecutionModel body)
@@ -1223,7 +1212,8 @@ public class Node extends ModelRequest<Node>
/**
* Link content to category performing POST call on "/nodes/{nodeId}/category-links"
*
* @param categoryLink - contains category ID
* @param categoryLink
* - contains category ID
* @return linked to category
*/
public RestCategoryModel linkToCategory(RestCategoryLinkBodyModel categoryLink)
@@ -1235,7 +1225,8 @@ public class Node extends ModelRequest<Node>
/**
* Link content to many categories performing POST call on "/nodes/{nodeId}/category-links"
*
* @param categoryLinks - contains categories IDs
* @param categoryLinks
* - contains categories IDs
* @return linked to categories
*/
public RestCategoryModelsCollection linkToCategories(List<RestCategoryLinkBodyModel> categoryLinks)
@@ -1247,11 +1238,34 @@ public class Node extends ModelRequest<Node>
/**
* Unlink content from a category performing a DELETE call on "nodes/{nodeId}/category-links/{categoryId}"
*
* @param categoryId the id of the category to be unlinked from content
* @param categoryId
* the id of the category to be unlinked from content
*/
public void unlinkFromCategory(String categoryId)
{
RestRequest request = RestRequest.simpleRequest(HttpMethod.DELETE, "nodes/{nodeId}/category-links/{categoryId}", repoModel.getNodeRef(), categoryId);
restWrapper.processEmptyModel(request);
}
/**
* Trigger folder size details calculation using POST call on "nodes/{nodeId}/size-details"
*
* @return
*/
public RestSizeDetailsModel executeSizeDetails()
{
RestRequest request = RestRequest.simpleRequest(HttpMethod.POST, "nodes/{nodeId}/size-details", repoModel.getNodeRef());
return restWrapper.processModel(RestSizeDetailsModel.class, request);
}
/**
* Get folder size details using GET call on "nodes/{nodeId}/size-details/{jobId}"
*
* @return
*/
public RestSizeDetailsModel getSizeDetails(String jobId)
{
RestRequest request = RestRequest.simpleRequest(HttpMethod.GET, "nodes/{nodeId}/size-details/{jobId}", repoModel.getNodeRef(), jobId);
return restWrapper.processModel(RestSizeDetailsModel.class, request);
}
}
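The two new methods above cover the asynchronous folder size-details endpoints: the POST starts a calculation job and returns its jobId, and the GET polls that job. A minimal usage sketch, built only from calls that appear elsewhere in this diff; the user and folderModel fixtures and the polling bound are illustrative assumptions, not part of the change:
// Sketch: start the size-details job, then poll until it completes.
// "user" and "folderModel" are assumed test fixtures; the API calls themselves
// (authenticateUser, withCoreAPI, usingNode, executeSizeDetails, getSizeDetails,
// Utility.waitToLoopTime, Utility.retryCountSeconds) all appear in this diff.
RestSizeDetailsModel job = restClient.authenticateUser(user)
        .withCoreAPI().usingNode(folderModel).executeSizeDetails();
RestSizeDetailsModel details = restClient.withCoreAPI().usingNode(folderModel)
        .getSizeDetails(job.getJobId());
int retry = 0;
while (details.getStatus() != RestSizeDetailsModel.STATUS.COMPLETED && retry < Utility.retryCountSeconds)
{
    Utility.waitToLoopTime(1); // same wait helper the rendition polling in Node uses
    details = restClient.withCoreAPI().usingNode(folderModel).getSizeDetails(job.getJobId());
    retry++;
}
// Once COMPLETED, details.getSizeInBytes() and details.getNumberOfFiles() hold the computed values.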


@@ -2,6 +2,11 @@ package org.alfresco.rest.favorites;
import java.util.List;
import org.hamcrest.Matchers;
import org.springframework.http.HttpStatus;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.alfresco.dataprep.CMISUtil.DocumentType;
import org.alfresco.rest.RestTest;
import org.alfresco.rest.model.RestErrorModel;
@@ -20,14 +25,11 @@ import org.alfresco.utility.model.TestGroup;
import org.alfresco.utility.model.UserModel;
import org.alfresco.utility.testrail.ExecutionType;
import org.alfresco.utility.testrail.annotation.TestRail;
import org.hamcrest.Matchers;
import org.springframework.http.HttpStatus;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class GetFavoritesTests extends RestTest
{
private static final String ALLOWABLE_OPERATIONS = "allowableOperations";
private static final String ASPECT_NAMES = "aspectNames";
private UserModel adminUserModel, userModel;
private SiteModel firstSiteModel;
private SiteModel secondSiteModel;
@@ -56,7 +58,7 @@ public class GetFavoritesTests extends RestTest
firstSiteUsers = dataUser.addUsersWithRolesToSite(firstSiteModel, UserRole.SiteManager, UserRole.SiteCollaborator, UserRole.SiteConsumer,
UserRole.SiteContributor);
secondSiteUsers = dataUser.addUsersWithRolesToSite(secondSiteModel, UserRole.SiteManager, UserRole.SiteCollaborator, UserRole.SiteConsumer,
UserRole.SiteContributor);
UserRole.SiteContributor);
restClient.authenticateUser(userModel);
restClient.withCoreAPI().usingUser(userModel).addSiteToFavorites(firstSiteModel);
@@ -64,9 +66,9 @@ public class GetFavoritesTests extends RestTest
restClient.withCoreAPI().usingUser(userModel).addSiteToFavorites(thirdSiteModel);
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.SANITY,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.SANITY,
description = "Verify Manager user gets favorites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.SANITY })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.SANITY})
public void managerIsAbleToRetrieveFavorites()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteManager));
@@ -79,10 +81,10 @@ public class GetFavoritesTests extends RestTest
.assertThat().entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.SANITY,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.SANITY,
description = "Verify user gets status code 401 if authentication call fails")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.SANITY })
// @Bug(id = "MNT-16904", description = "It fails only on environment with tenants")
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.SANITY})
// @Bug(id = "MNT-16904", description = "It fails only on environment with tenants")
public void userIsNotAbleToRetrieveFavoritesIfAuthenticationFails()
{
UserModel siteManager = firstSiteUsers.getOneUserWithRole(UserRole.SiteManager);
@@ -91,9 +93,9 @@ public class GetFavoritesTests extends RestTest
restClient.assertStatusCodeIs(HttpStatus.UNAUTHORIZED);
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Admin user gets favorites sites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void adminIsAbleToRetrieveFavoritesSites()
{
restClient.authenticateUser(adminUserModel).withCoreAPI().usingUser(adminUserModel).addSiteToFavorites(firstSiteModel);
@@ -103,12 +105,12 @@ public class GetFavoritesTests extends RestTest
.usingAuthUser().where().targetSiteExist().getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
userFavorites.assertThat().entriesListContains("targetGuid", firstSiteModel.getGuid())
.and().entriesListContains("targetGuid", secondSiteModel.getGuid());
.and().entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Admin user gets favorites folders with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void adminIsAbleToRetrieveFavoritesFolders()
{
restClient.authenticateUser(adminUserModel).withCoreAPI().usingUser(adminUserModel).addFolderToFavorites(firstFolderModel);
@@ -118,12 +120,12 @@ public class GetFavoritesTests extends RestTest
.usingAuthUser().where().targetFolderExist().getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
userFavorites.assertThat().entriesListContains("targetGuid", firstFolderModel.getNodeRef())
.and().entriesListContains("targetGuid", secondFolderModel.getNodeRef());
.and().entriesListContains("targetGuid", secondFolderModel.getNodeRef());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Admin user gets favorites files with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void adminIsAbleToRetrieveFavoritesFiles()
{
restClient.authenticateUser(adminUserModel).withCoreAPI().usingUser(adminUserModel).addFileToFavorites(firstFileModel);
@@ -136,87 +138,87 @@ public class GetFavoritesTests extends RestTest
.and().entriesListContains("targetGuid", secondFileModel.getNodeRefWithoutVersion());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Collaborator user gets favorites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void collaboratorIsAbleToRetrieveFavorites()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator))
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
restClient.withCoreAPI().usingAuthUser().addSiteToFavorites(secondSiteModel);
userFavorites = restClient.withCoreAPI().usingAuthUser().getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
userFavorites.assertThat().entriesListContains("targetGuid", firstSiteModel.getGuid())
.and().entriesListContains("targetGuid", secondSiteModel.getGuid());
.and().entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Contributor user gets favorites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void contributorIsAbleToRetrieveFavorites()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteContributor))
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
restClient.withCoreAPI().usingAuthUser().addSiteToFavorites(secondSiteModel);
userFavorites = restClient.withCoreAPI().usingAuthUser().getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
userFavorites.assertThat().entriesListContains("targetGuid", firstSiteModel.getGuid()).and()
.entriesListContains("targetGuid", secondSiteModel.getGuid());
.entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify Consumer user gets favorites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void consumerIsAbleToRetrieveFavorites()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteConsumer))
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
.withCoreAPI().usingAuthUser().addSiteToFavorites(firstSiteModel);
restClient.withCoreAPI().usingAuthUser().addSiteToFavorites(secondSiteModel);
userFavorites = restClient.withCoreAPI().usingAuthUser().getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
userFavorites.assertThat().entriesListContains("targetGuid", firstSiteModel.getGuid())
.assertThat().entriesListContains("targetGuid", secondSiteModel.getGuid());
.assertThat().entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify user doesn't have permission to get favorites of another user with Rest API and status code is 404")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsNotAbleToRetrieveFavoritesOfAnotherUser()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteConsumer))
.withCoreAPI().usingUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator)).getFavorites();
.withCoreAPI().usingUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator)).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND)
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator).getUsername()));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify user doesn't have permission to get favorites of admin user with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsNotAbleToRetrieveFavoritesOfAdminUser()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteConsumer)).withCoreAPI()
.usingUser(adminUserModel).getFavorites();
.usingUser(adminUserModel).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND)
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, adminUserModel.getUsername()));
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, adminUserModel.getUsername()));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify admin user doesn't have permission to get favorites of another user with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void adminIsNotAbleToRetrieveFavoritesOfAnotherUser()
{
restClient.authenticateUser(adminUserModel).withCoreAPI().usingUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator))
.getFavorites();
.getFavorites();
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND)
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator).getUsername()));
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator).getUsername()));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites sites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveOnlyFavoritesSites()
{
restClient.authenticateUser(secondSiteUsers.getOneUserWithRole(UserRole.SiteManager));
@@ -232,9 +234,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstFolderModel.getNodeRef());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites files with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveOnlyFavoritesFiles()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator));
@@ -250,9 +252,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstFolderModel.getNodeRef());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites folders with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveOnlyFavoritesFolders()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator));
@@ -268,9 +270,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstFileModel.getNodeRefWithoutVersion());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites files or folders with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveFavoritesFilesOrFolders()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteConsumer));
@@ -289,9 +291,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites files or sites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveFavoritesFilesOrSites()
{
restClient.authenticateUser(secondSiteUsers.getOneUserWithRole(UserRole.SiteManager));
@@ -310,9 +312,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstFolderModel.getNodeRef());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets only favorites folders or sites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveFavoritesFoldersOrSites()
{
restClient.authenticateUser(secondSiteUsers.getOneUserWithRole(UserRole.SiteManager));
@@ -331,9 +333,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", firstFileModel.getNodeRefWithoutVersion());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets all favorites with Rest API and status code is 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToRetrieveAllFavorites()
{
restClient.authenticateUser(secondSiteUsers.getOneUserWithRole(UserRole.SiteCollaborator));
@@ -362,9 +364,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", secondFileModel.getNodeRefWithoutVersion());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify request for a user with no favorites returns status 200")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userHasNoFavorites()
{
restClient.authenticateUser(secondSiteUsers.getOneUserWithRole(UserRole.SiteContributor));
@@ -374,9 +376,9 @@ public class GetFavoritesTests extends RestTest
userFavorites.assertThat().entriesListIsEmpty().and().paginationField("totalItems").is("0");
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify request using invalid where parameter returns status 400")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void getFavoritesUsingInvalidWhereParameter()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteConsumer));
@@ -390,9 +392,9 @@ public class GetFavoritesTests extends RestTest
.assertLastError().containsSummary(String.format(RestErrorModel.INVALID_ARGUMENT, "WHERE query"));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify User gets correct favorites after deleting a favorite")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void checkFavoriteFolderIsRemoved()
{
restClient.authenticateUser(firstSiteUsers.getOneUserWithRole(UserRole.SiteManager));
@@ -408,9 +410,9 @@ public class GetFavoritesTests extends RestTest
.and().paginationField("totalItems").is("2");
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites specifying -me- string in place of <personid> for request")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesWhenUsingMeAsUsername()
{
userFavorites = restClient.authenticateUser(userModel).withCoreAPI().usingMe().getFavorites();
@@ -418,9 +420,9 @@ public class GetFavoritesTests extends RestTest
userFavorites.assertThat().entriesListContains("targetGuid", firstSiteModel.getGuid()).and().entriesListContains("targetGuid", secondSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites using empty for where parameter for request")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesWhenUsingEmptyWhereParameter()
{
userFavorites = restClient.authenticateUser(adminUserModel).withCoreAPI().usingAuthUser().where().getFavorites();
@@ -428,27 +430,27 @@ public class GetFavoritesTests extends RestTest
.assertLastError().containsSummary(String.format(RestErrorModel.INVALID_ARGUMENT, "WHERE query"));
}
@TestRail(section = { TestGroup.REST_API,TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify that for invalid maxItems parameter status code returned is 400.")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void checkInvalidMaxItemsStatusCode()
{
restClient.authenticateUser(adminUserModel).withParams("maxItems=AB").withCoreAPI().usingUser(adminUserModel).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST).assertLastError().containsSummary("Invalid paging parameter");
}
@TestRail(section = { TestGroup.REST_API,TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify that for invalid skipCount parameter status code returned is 400.")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void checkInvalidSkipCountStatusCode()
{
restClient.authenticateUser(adminUserModel).withParams("skipCount=AB").withCoreAPI().usingUser(adminUserModel).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST).assertLastError().containsSummary("Invalid paging parameter");
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites when using invalid network id for non-tenant user")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void getFavoritesWhenNetworkIdIsInvalid()
{
UserModel networkUserModel = dataUser.createRandomTestUser();
@@ -457,9 +459,9 @@ public class GetFavoritesTests extends RestTest
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND).assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_NOT_FOUND, networkUserModel.getUsername()));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites using AND instead of OR in where parameter for request")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsNotAbleToGetFavoritesWhenUsingANDInWhereParameter()
{
userFavorites = restClient.withCoreAPI().usingAuthUser().where().targetFolderExist().invalidWhereParameter("AND").targetFileExist().getFavorites();
@@ -467,9 +469,9 @@ public class GetFavoritesTests extends RestTest
.assertLastError().containsSummary(String.format(RestErrorModel.INVALID_ARGUMENT, "WHERE query"));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites using wrong name instead of EXISTS in where parameter for request")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsNotAbleToGetFavoritesWhenUsingWrongWhereParameter()
{
userFavorites = restClient.withCoreAPI().usingAuthUser().where().invalidWhereParameter("EXIST((target/site))").targetFileExist().getFavorites();
@@ -477,9 +479,9 @@ public class GetFavoritesTests extends RestTest
.assertLastError().containsSummary(String.format(RestErrorModel.INVALID_ARGUMENT, "WHERE query"));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites except the first one for request")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesExceptTheFirstOne()
{
userFavorites = restClient.authenticateUser(userModel).withParams("skipCount=1").withCoreAPI().usingUser(userModel).getFavorites();
@@ -490,9 +492,9 @@ public class GetFavoritesTests extends RestTest
.and().entriesListDoesNotContain("targetGuid", thirdSiteModel.getGuid());
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get first two favorites sites")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFirstTwoFavorites()
{
userFavorites = restClient.authenticateUser(userModel).withParams("maxItems=2").withCoreAPI().usingUser(userModel).getFavorites();
@@ -506,9 +508,9 @@ public class GetFavoritesTests extends RestTest
.and().field("count").is("2");
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify get favorites sites when using empty values for skipCount and maxItems")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesWhenSkipCountAndMaxItemsAreEmpty()
{
restClient.authenticateUser(userModel).withParams("skipCount= ").withCoreAPI().usingUser(userModel).getFavorites();
@@ -518,9 +520,9 @@ public class GetFavoritesTests extends RestTest
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST).assertLastError().containsSummary(String.format(RestErrorModel.INVALID_MAXITEMS, " "));
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify the get favorites request for a high value for skipCount parameter")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesWithHighSkipCount()
{
userFavorites = restClient.authenticateUser(userModel).withParams("skipCount=999999999").withCoreAPI().usingUser(userModel).getFavorites();
@@ -529,9 +531,9 @@ public class GetFavoritesTests extends RestTest
userFavorites.assertThat().entriesListIsEmpty().assertThat().paginationField("skipCount").is("999999999");
}
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION,
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify the get favorites request with properties parameter applied")
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void userIsAbleToGetFavoritesWithPropertiesParamApplied()
{
userFavorites = restClient.authenticateUser(userModel).withParams("properties=targetGuid").withCoreAPI().usingUser(userModel).getFavorites();
@@ -541,8 +543,8 @@ public class GetFavoritesTests extends RestTest
restPersonFavoritesModel.assertThat().field("targetGuid").is(thirdSiteModel.getGuid()).and().field("createdAt").isNull();
}
@Test(groups = { TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION })
@TestRail(section = { TestGroup.REST_API, TestGroup.FAVORITES }, executionType = ExecutionType.REGRESSION, description = "Verify entry details for get favorites response with Rest API")
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION, description = "Verify entry details for get favorites response with Rest API")
public void checkResponseSchemaForGetFavorites()
{
userFavorites = restClient.authenticateUser(userModel).withCoreAPI().usingAuthUser().getFavorites();
@@ -564,8 +566,7 @@ public class GetFavoritesTests extends RestTest
description = "Verify if get favorites response returns allowableOperations object when requested")
public void checkResponsesForGetFavoritesWithAllowableOperations()
{
final RestPersonFavoritesModelsCollection adminFavorites =
restClient.authenticateUser(adminUserModel).withCoreAPI().usingAuthUser().include(ALLOWABLE_OPERATIONS).getFavorites();
final RestPersonFavoritesModelsCollection adminFavorites = restClient.authenticateUser(adminUserModel).withCoreAPI().usingAuthUser().include(ALLOWABLE_OPERATIONS).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
adminFavorites.getEntries().stream()
@@ -576,18 +577,30 @@ public class GetFavoritesTests extends RestTest
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify the get favorites request with properties parameter applied")
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
public void checkSearchResponseContainsIsFavoriteWhenRequested() throws InterruptedException {
public void checkSearchResponseContainsIsFavoriteWhenRequested() throws InterruptedException
{
final SearchRequest query = new SearchRequest();
final RestRequestQueryModel queryReq = new RestRequestQueryModel();
queryReq.setQuery(firstFileModel.getName());
query.setQuery(queryReq);
query.setInclude(List.of("isFavorite"));
Utility.sleep(500, 60000, () ->
{
restClient.authenticateUser(adminUserModel).withSearchAPI().search(query);
restClient.onResponse().assertThat().body("list.entries.entry[0].isFavorite", Matchers.notNullValue());
}
);
Utility.sleep(500, 60000, () -> {
restClient.authenticateUser(adminUserModel).withSearchAPI().search(query);
restClient.onResponse().assertThat().body("list.entries.entry[0].isFavorite", Matchers.notNullValue());
});
}
@Test(groups = {TestGroup.REST_API, TestGroup.FAVORITES, TestGroup.REGRESSION})
@TestRail(section = {TestGroup.REST_API, TestGroup.FAVORITES}, executionType = ExecutionType.REGRESSION,
description = "Verify if get favorites response returns aspectNames when requested")
public void checkResponsesForGetFavoritesWithAspectNames()
{
final RestPersonFavoritesModelsCollection adminFavorites = restClient.authenticateUser(adminUserModel).withCoreAPI().usingAuthUser().include(ASPECT_NAMES).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
adminFavorites.getEntries().stream()
.map(RestPersonFavoritesModel::onModel)
.forEach(m -> m.assertThat().field(ASPECT_NAMES).isNotEmpty());
}
}
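A minimal usage sketch of the new aspectNames include verified by the last test above, assuming the same test fixtures (restClient, adminUserModel) and the ASPECT_NAMES constant used in that test; with this change the favorites endpoint accepts include=aspectNames in addition to properties and allowableOperations:
// Hedged sketch, not part of the diff: request favorites with aspect names included.
RestPersonFavoritesModelsCollection favoritesWithAspects = restClient.authenticateUser(adminUserModel)
        .withCoreAPI().usingAuthUser().include(ASPECT_NAMES).getFavorites();
restClient.assertStatusCodeIs(HttpStatus.OK);
// Each returned favorite entry is expected to expose a non-empty aspectNames field.
favoritesWithAspects.getEntries().stream()
        .map(RestPersonFavoritesModel::onModel)
        .forEach(entry -> entry.assertThat().field(ASPECT_NAMES).isNotEmpty());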

View File

@@ -0,0 +1,380 @@
package org.alfresco.rest.nodes;
import static java.util.Objects.requireNonNull;
import static org.alfresco.utility.report.log.Step.STEP;
import java.io.IOException;
import java.io.InputStream;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.IntStream;
import org.apache.commons.lang3.RandomStringUtils;
import org.awaitility.Awaitility;
import org.awaitility.Durations;
import org.springframework.http.HttpStatus;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.alfresco.dataprep.CMISUtil.DocumentType;
import org.alfresco.rest.RestTest;
import org.alfresco.rest.model.RestSizeDetailsModel;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.*;
import org.alfresco.utility.testrail.ExecutionType;
import org.alfresco.utility.testrail.annotation.TestRail;
public class NodeSizeDetailsTests extends RestTest
{
private UserModel user1;
private SiteModel siteModel;
private FolderModel folder;
private String jobId;
private FileModel sampleFileToCreate;
private long sampleFileSize;
@BeforeClass(alwaysRun = true)
public void dataPreparation() throws IOException
{
user1 = dataUser.createRandomTestUser("User-1");
siteModel = dataSite.usingUser(user1).createPublicRandomSite();
folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
String fileName = "sampleLargeContent.txt";
final byte[] sampleFileContent = getSampleFileContent(fileName);
sampleFileSize = sampleFileContent.length;
sampleFileToCreate = new FileModel(fileName, FileType.TEXT_PLAIN, new String(sampleFileContent));
}
/**
* calculateNodeSizeForSingleFile testcase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void calculateNodeSizeForSingleFile() throws Exception
{
STEP("1. Create a folder in the test site.");
folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
STEP("2. Upload a text document to the folder.");
dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(folder)
.createContent(sampleFileToCreate);
STEP("3. Wait for 30 seconds so that the content is indexed in Search Service.");
Utility.waitToLoopTime(30);
RestSizeDetailsModel restSizeDetailsModel = restClient.authenticateUser(user1).withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
restSizeDetailsModel.assertThat().field("jobId").isNotEmpty();
jobId = restSizeDetailsModel.getJobId();
STEP("4. Wait for 5 seconds for the processing to complete.");
Awaitility
.await()
.atMost(Duration.ofSeconds(5))
.pollInterval(Durations.ONE_SECOND)
.ignoreExceptions()
.untilAsserted(() -> {
RestSizeDetailsModel sizeDetailsModel = restClient.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.getSizeDetails(jobId);
restClient.assertStatusCodeIs(HttpStatus.OK);
sizeDetailsModel.assertThat()
.field("sizeInBytes")
.isNotEmpty();
Assert.assertEquals(sizeDetailsModel.getSizeInBytes(), sampleFileSize,
"Value of sizeInBytes " + sizeDetailsModel.getSizeInBytes()
+ " is not equal to " + sampleFileSize);
});
}
/**
* checkJobIdPresentInCache testcase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void checkJobIdPresentInCache() throws Exception
{
STEP("1. Verifying that same JobId is coming or not");
RestSizeDetailsModel restSizeDetailsModel = restClient.authenticateUser(user1).withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
restSizeDetailsModel.assertThat().field("jobId").isNotEmpty();
Assert.assertEquals(restSizeDetailsModel.getJobId(), jobId, "jobId should be present in cache, actual :" + restSizeDetailsModel.getJobId() + " expected: " + jobId);
}
/**
* checkSizeDetailsWithInvalidJobId testcase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void checkSizeDetailsWithInvalidJobId() throws Exception
{
STEP("1. Create a folder in the test site.");
folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
STEP("2. Upload a text document to the folder.");
dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(folder)
.createContent(sampleFileToCreate);
STEP("3. Wait for 30 seconds so that the content is indexed in Search Service.");
Utility.waitToLoopTime(30);
RestSizeDetailsModel restSizeDetailsModel = restClient.authenticateUser(user1).withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
restSizeDetailsModel.assertThat().field("jobId").isNotEmpty();
jobId = restSizeDetailsModel.getJobId();
STEP("4. Adding random content to jobId ");
jobId += RandomStringUtils.randomAlphanumeric(2);
STEP("5. Wait for 10 seconds for the processing to complete.");
Awaitility
.await()
.atMost(Duration.ofSeconds(10))
.pollInterval(Durations.ONE_SECOND)
.ignoreExceptions()
.untilAsserted(() -> {
restClient.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.getSizeDetails(jobId);
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND);
});
}
/**
* checkSizeDetailsWithoutExecuteSizeDetails testcase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void checkSizeDetailsWithoutExecuteSizeDetails() throws Exception
{
STEP("1. Create a folder in the test site.");
folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
STEP("2. Upload a text document to the folder.");
String status = "NOT_INITIATED";
FileModel fileModel = dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(folder)
.createContent(sampleFileToCreate);
Assert.assertNotNull(fileModel, "fileModel should not be null");
STEP("3. Wait for 30 seconds so that the content is indexed in Search Service.");
Awaitility
.await()
.atMost(Duration.ofSeconds(30))
.pollInterval(Durations.ONE_SECOND)
.ignoreExceptions()
.untilAsserted(() -> {
RestSizeDetailsModel sizeDetailsModel = restClient.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.getSizeDetails(jobId);
restClient.assertStatusCodeIs(HttpStatus.OK);
sizeDetailsModel.assertThat().field("status").isNotEmpty();
Assert.assertEquals(sizeDetailsModel.getStatus().toString(), status, "Value of status should be same, actual :" + sizeDetailsModel.getStatus().toString() + " expected: " + status);
});
}
/**
* Unauthenticated user not able to execute POST /nodes/{nodeId}/size-details: 401 STATUS CODE
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void unauthenticatedUserIsNotAbleGetSizeDetails()
{
restClient.authenticateUser(new UserModel("random user", "random password"));
restClient.withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.UNAUTHORIZED);
}
/**
* Node Id Not Exist: 404 STATUS CODE
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void nodeIdNotExist()
{
folder.setNodeRef(RandomStringUtils.randomAlphanumeric(20));
restClient.authenticateUser(user1).withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND);
}
/**
* Value of nodeId is invalid: 422 STATUS CODE
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void nodeIdNotValid()
{
FileModel document = dataContent.usingSite(siteModel).usingUser(user1).createContent(DocumentType.TEXT_PLAIN);
restClient.authenticateUser(user1).withCoreAPI().usingNode(document).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.UNPROCESSABLE_ENTITY);
}
/**
*
* calculateNodeSizeForMultipleFiles testCase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void calculateNodeSizeForMultipleFiles() throws InterruptedException
{
STEP("1. Create a parent folder in the test site.");
FolderModel folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
STEP("2. Creating a 5 nested folders in the folder-1");
AtomicLong fileSize = new AtomicLong(0);
IntStream.rangeClosed(1, 5).forEach(i -> {
String folder0Name = "childFolder" + i + RandomStringUtils.randomAlphanumeric(2);
FolderModel folderModel = new FolderModel();
folderModel.setName(folder0Name);
FolderModel childFolder = dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(folder)
.createFolder(folderModel);
STEP("3. Upload a text document to the childFolders.");
dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(childFolder)
.createContent(sampleFileToCreate);
fileSize.addAndGet(sampleFileSize);
});
STEP("4. Wait for 30 seconds so that the content is indexed in Search Service.");
Utility.waitToLoopTime(30);
RestSizeDetailsModel restSizeDetailsModel = restClient
.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
restSizeDetailsModel.assertThat().field("jobId").isNotEmpty();
String jobId = restSizeDetailsModel.getJobId();
STEP("5. Wait for 5 seconds for the processing to complete.");
Awaitility
.await()
.atMost(Duration.ofSeconds(5))
.pollInterval(Durations.ONE_SECOND)
.ignoreExceptions()
.untilAsserted(() -> {
RestSizeDetailsModel sizeDetailsModel = restClient.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.getSizeDetails(jobId);
restClient.assertStatusCodeIs(HttpStatus.OK);
sizeDetailsModel.assertThat()
.field("sizeInBytes")
.isNotEmpty();
Assert.assertEquals(sizeDetailsModel.getSizeInBytes(), fileSize.get(),
"Value of sizeInBytes " + sizeDetailsModel.getSizeInBytes()
+ " is not equal to " + fileSize.get());
});
}
/**
*
* checkNumberOfFiles testCase
*/
@TestRail(section = {TestGroup.REST_API, TestGroup.NODES}, executionType = ExecutionType.SANITY)
@Test(groups = {TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
public void checkNumberOfFiles() throws InterruptedException
{
STEP("1. Create a parent folder in the test site.");
FolderModel folder = dataContent.usingUser(user1).usingSite(siteModel).createFolder(FolderModel.getRandomFolderModel());
STEP("2. Creating a 10 nested folders in the folder-1");
IntStream.rangeClosed(1, 10).forEach(i -> {
String folder0Name = "childFolder" + i + RandomStringUtils.randomAlphanumeric(2);
FolderModel folderModel = new FolderModel();
folderModel.setName(folder0Name);
FolderModel childFolder = dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(folder)
.createFolder(folderModel);
STEP("3. Upload a text document to the childFolders.");
dataContent.usingUser(user1)
.usingSite(siteModel)
.usingResource(childFolder)
.createContent(sampleFileToCreate);
});
STEP("4. Wait for 30 seconds so that the content is indexed in Search Service.");
Utility.waitToLoopTime(30);
RestSizeDetailsModel restSizeDetailsModel = restClient
.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
restSizeDetailsModel.assertThat().field("jobId").isNotEmpty();
String jobId = restSizeDetailsModel.getJobId();
STEP("5. Wait for 10 seconds for the processing to complete.");
Awaitility
.await()
.atMost(Duration.ofSeconds(10))
.pollInterval(Durations.ONE_SECOND)
.ignoreExceptions()
.untilAsserted(() -> {
RestSizeDetailsModel sizeDetailsModel = restClient.authenticateUser(user1)
.withCoreAPI()
.usingNode(folder)
.getSizeDetails(jobId);
restClient.assertStatusCodeIs(HttpStatus.OK);
sizeDetailsModel.assertThat().field("numberOfFiles").isNotEmpty();
Assert.assertEquals(sizeDetailsModel.getNumberOfFiles(), 10, "Value of NumberOfFiles " + sizeDetailsModel.getNumberOfFiles() + " is not equal to " + 10);
});
}
private byte[] getSampleFileContent(String fileName) throws IOException
{
final String fileClasspathLocation = "/shared-resources/testdata/" + fileName;
try (InputStream fileStream = getClass().getResourceAsStream(fileClasspathLocation))
{
requireNonNull(fileStream, "Couldn't locate `" + fileClasspathLocation + "`");
return fileStream.readAllBytes();
}
}
@AfterClass(alwaysRun = true)
public void cleanup() throws Exception
{
dataSite.usingUser(user1).deleteSite(siteModel);
}
}
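A condensed sketch of the asynchronous flow these tests exercise, assuming the same fixtures (restClient, user1, folder): POST /nodes/{nodeId}/size-details answers 202 Accepted with a jobId, which is then polled until the size details become available.
// Hedged sketch, not part of the diff: submit the size-details calculation and poll for its result.
RestSizeDetailsModel submitted = restClient.authenticateUser(user1)
        .withCoreAPI().usingNode(folder).executeSizeDetails();
restClient.assertStatusCodeIs(HttpStatus.ACCEPTED);
String sizeDetailsJobId = submitted.getJobId();
Awaitility.await()
        .atMost(Duration.ofSeconds(10))
        .pollInterval(Durations.ONE_SECOND)
        .ignoreExceptions()
        .untilAsserted(() -> {
            // Fetch the result for the previously returned jobId and check it is populated.
            RestSizeDetailsModel result = restClient.authenticateUser(user1)
                    .withCoreAPI().usingNode(folder).getSizeDetails(sizeDetailsJobId);
            restClient.assertStatusCodeIs(HttpStatus.OK);
            result.assertThat().field("sizeInBytes").isNotEmpty();
        });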

View File

@@ -0,0 +1,600 @@
Sample text.
[... "Sample text." repeated — this new test-data file (shared-resources/testdata/sampleLargeContent.txt) consists of the same line repeated for 600 lines in total ...]

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<properties>

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -23,8 +23,8 @@
</modules>
<properties>
<acs.version.major>23</acs.version.major>
<acs.version.minor>4</acs.version.minor>
<acs.version.major>25</acs.version.major>
<acs.version.minor>1</acs.version.minor>
<acs.version.revision>0</acs.version.revision>
<acs.version.label />
<amp.min.version>${acs.version.major}.0.0</amp.min.version>
@@ -51,25 +51,25 @@
<dependency.alfresco-server-root.version>7.0.1</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.1.5-A1</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.1.5-A1</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.1.5</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.1.5</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.0</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.33</dependency.acs-event-model.version>
<dependency.aspectj.version>1.9.22.1</dependency.aspectj.version>
<dependency.spring.version>6.1.13</dependency.spring.version>
<dependency.spring-security.version>6.3.3</dependency.spring-security.version>
<dependency.spring.version>6.1.14</dependency.spring.version>
<dependency.spring-security.version>6.3.4</dependency.spring-security.version>
<dependency.antlr.version>3.5.3</dependency.antlr.version>
<dependency.jackson.version>2.17.2</dependency.jackson.version>
<dependency.cxf.version>4.0.5</dependency.cxf.version>
<dependency.opencmis.version>1.0.0-jakarta-1</dependency.opencmis.version>
<dependency.webscripts.version>9.4</dependency.webscripts.version>
<dependency.bouncycastle.version>1.78.1</dependency.bouncycastle.version>
<dependency.bouncycastle.version>1.79</dependency.bouncycastle.version>
<dependency.mockito-core.version>5.14.1</dependency.mockito-core.version>
<dependency.assertj.version>3.26.3</dependency.assertj.version>
<dependency.org-json.version>20240303</dependency.org-json.version>
<dependency.commons-dbcp.version>2.12.0</dependency.commons-dbcp.version>
<dependency.commons-io.version>2.16.1</dependency.commons-io.version>
<dependency.commons-io.version>2.18.0</dependency.commons-io.version>
<dependency.gson.version>2.11.0</dependency.gson.version>
<dependency.guava.version>33.3.1-jre</dependency.guava.version>
<dependency.httpclient.version>4.5.14</dependency.httpclient.version>
@@ -80,7 +80,7 @@
<dependency.xercesImpl.version>2.12.2</dependency.xercesImpl.version>
<dependency.slf4j.version>2.0.16</dependency.slf4j.version>
<dependency.log4j.version>2.23.1</dependency.log4j.version>
<dependency.groovy.version>3.0.22</dependency.groovy.version>
<dependency.groovy.version>3.0.23</dependency.groovy.version>
<dependency.tika.version>2.9.2</dependency.tika.version>
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>5.3.0</dependency.poi.version>
@@ -96,6 +96,7 @@
<dependency.maven-artifact.version>3.8.6</dependency.maven-artifact.version>
<dependency.jdom2.version>2.0.6.1</dependency.jdom2.version>
<dependency.pooled-jms.version>3.1.6</dependency.pooled-jms.version>
<dependency.kxml2.version>2.3.0</dependency.kxml2.version>
<dependency.jakarta-ee-jaxb-api.version>4.0.2</dependency.jakarta-ee-jaxb-api.version>
<dependency.jakarta-ee-jaxb-impl.version>4.0.5</dependency.jakarta-ee-jaxb-impl.version>
@@ -113,8 +114,8 @@
<dependency.jakarta-json-path.version>2.9.0</dependency.jakarta-json-path.version>
<dependency.json-smart.version>2.5.1</dependency.json-smart.version>
<alfresco.googledrive.version>4.1.0</alfresco.googledrive.version>
<alfresco.aos-module.version>3.1.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.3.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.aos-module.version>3.2.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.4.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
<license-maven-plugin.version>2.4.0</license-maven-plugin.version>
@@ -154,7 +155,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>23.4.0.39</tag>
<tag>25.1.0.3</tag>
</scm>
<distributionManagement>
@@ -371,7 +372,7 @@
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>2.3.2</version>
<version>2.5.0</version>
<!-- exclude c3p0 -->
<!-- see https://issues.alfresco.com/jira/browse/REPO-3447 -->
<exclusions>
@@ -465,7 +466,7 @@
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>5.2.1</version>
<version>5.2.2</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
@@ -703,7 +704,7 @@
<dependency>
<groupId>com.networknt</groupId>
<artifactId>json-schema-validator</artifactId>
<version>1.5.1</version>
<version>1.5.3</version>
</dependency>
<!-- upgrade dependency from TIKA -->
<dependency>
@@ -950,7 +951,7 @@
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.34</version>
<version>1.18.36</version>
<scope>provided</scope>
</dependency>
<dependency>
@@ -1015,7 +1016,7 @@
</plugin>
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.5.0</version>
<version>3.5.2</version>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
@@ -1028,7 +1029,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.10.1</version>
<version>3.11.1</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<dependencies>
@@ -45,7 +45,7 @@
<dependency>
<groupId>org.apache.santuario</groupId>
<artifactId>xmlsec</artifactId>
<version>4.0.2</version>
<version>4.0.3</version>
</dependency>
<!-- newer version, see REPO-3133 -->
<dependency>

View File

@@ -0,0 +1,36 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
public interface SizeDetails
{
NodeSizeDetails generateNodeSizeDetailsRequest(String nodeId);
NodeSizeDetails getNodeSizeDetails(String nodeId, String jobId);
}
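
Note: the interface above is the facade behind the new size-details endpoints — the first call starts an asynchronous folder-size calculation and returns its jobId, the second reads the (possibly still pending) result for that job. A minimal caller sketch, not part of this changeset; SizeDetailsClientSketch and the folder node id are hypothetical, and only the getJobId() accessor shown elsewhere in this diff is assumed on NodeSizeDetails:

import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
import org.alfresco.rest.api.SizeDetails;

// Illustrative only: the intended two-step use of the SizeDetails facade above.
public class SizeDetailsClientSketch
{
    private final SizeDetails sizeDetails;

    public SizeDetailsClientSketch(SizeDetails sizeDetails)
    {
        this.sizeDetails = sizeDetails;
    }

    public NodeSizeDetails fetchSizeDetails(String folderNodeId)
    {
        // Step 1: request the calculation; the REST layer answers 202 Accepted with the jobId.
        NodeSizeDetails request = sizeDetails.generateNodeSizeDetailsRequest(folderNodeId);

        // Step 2: read the cached result for that job. The status may still be PENDING,
        // in which case a real client would poll again after a short pause.
        return sizeDetails.getNodeSizeDetails(folderNodeId, request.getJobId());
    }
}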

View File

@@ -26,6 +26,7 @@
package org.alfresco.rest.api.impl;
import static org.alfresco.rest.api.Nodes.PARAM_INCLUDE_ALLOWABLEOPERATIONS;
import static org.alfresco.rest.api.Nodes.PARAM_INCLUDE_ASPECTNAMES;
import java.util.AbstractList;
import java.util.ArrayList;
@@ -38,6 +39,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.query.PagingResults;
@@ -77,9 +80,6 @@ import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Public REST API: centralises access to favourites functionality and maps between repository and API representations.
@@ -89,15 +89,13 @@ import org.apache.commons.logging.LogFactory;
*/
public class FavouritesImpl implements Favourites
{
private static final Log logger = LogFactory.getLog(FavouritesImpl.class);
private static final List<String> ALLOWED_INCLUDES = List.of(PARAM_INCLUDE_PROPERTIES, PARAM_INCLUDE_ALLOWABLEOPERATIONS);
private static final List<String> ALLOWED_INCLUDES = List.of(PARAM_INCLUDE_PROPERTIES, PARAM_INCLUDE_ASPECTNAMES, PARAM_INCLUDE_ALLOWABLEOPERATIONS);
private People people;
private Sites sites;
private Nodes nodes;
private FavouritesService favouritesService;
private SiteService siteService;
private Sites sites;
private Nodes nodes;
private FavouritesService favouritesService;
private SiteService siteService;
private NamespaceService namespaceService;
// additional exclude properties for favourites as these can be already top-level properties
@@ -105,92 +103,95 @@ public class FavouritesImpl implements Favourites
ContentModel.PROP_TITLE,
ContentModel.PROP_DESCRIPTION,
SiteModel.PROP_SITE_VISIBILITY,
SiteModel.PROP_SITE_PRESET
);
SiteModel.PROP_SITE_PRESET);
public void setPeople(People people)
{
this.people = people;
}
public void setPeople(People people)
{
this.people = people;
}
public void setSites(Sites sites)
{
this.sites = sites;
}
public void setSites(Sites sites)
{
this.sites = sites;
}
public void setNodes(Nodes nodes)
{
this.nodes = nodes;
}
public void setNodes(Nodes nodes)
{
this.nodes = nodes;
}
public void setFavouritesService(FavouritesService favouritesService)
{
this.favouritesService = favouritesService;
}
public void setFavouritesService(FavouritesService favouritesService)
{
this.favouritesService = favouritesService;
}
public void setSiteService(SiteService siteService)
{
this.siteService = siteService;
}
public void setSiteService(SiteService siteService)
{
this.siteService = siteService;
}
public void setNamespaceService(NamespaceService namespaceService)
{
this.namespaceService = namespaceService;
}
private Target getTarget(PersonFavourite personFavourite, Parameters parameters)
{
Target target = null;
NodeRef nodeRef = personFavourite.getNodeRef();
Type type = personFavourite.getType();
if(type.equals(Type.FILE))
{
Document document = nodes.getDocument(nodeRef);
private Target getTarget(PersonFavourite personFavourite, Parameters parameters)
{
Target target;
NodeRef nodeRef = personFavourite.getNodeRef();
Type type = personFavourite.getType();
if (type.equals(Type.FILE))
{
Document document = nodes.getDocument(nodeRef);
setPathInfo(document, parameters.getInclude());
target = new DocumentTarget(document);
}
else if(type.equals(Type.FOLDER))
{
Folder folder = nodes.getFolder(nodeRef);
}
else if (type.equals(Type.FOLDER))
{
Folder folder = nodes.getFolder(nodeRef);
setPathInfo(folder, parameters.getInclude());
target = new FolderTarget(folder);
}
else if(type.equals(Type.SITE))
{
SiteInfo siteInfo = siteService.getSite(nodeRef);
String role = sites.getSiteRole(siteInfo.getShortName());
Site site = new Site(siteInfo, role);
target = new SiteTarget(site);
}
else
{
throw new AlfrescoRuntimeException("Unexpected favourite target type: " + type);
}
target = new FolderTarget(folder);
}
else if (type.equals(Type.SITE))
{
SiteInfo siteInfo = siteService.getSite(nodeRef);
String role = sites.getSiteRole(siteInfo.getShortName());
Site site = new Site(siteInfo, role);
target = new SiteTarget(site);
}
else
{
throw new AlfrescoRuntimeException("Unexpected favourite target type: " + type);
}
return target;
}
return target;
}
private Favourite getFavourite(PersonFavourite personFavourite, Parameters parameters)
{
Favourite fav = new Favourite();
fav.setTargetGuid(personFavourite.getNodeRef().getId());
fav.setCreatedAt(personFavourite.getCreatedAt());
Target target = getTarget(personFavourite, parameters);
fav.setTarget(target);
private Favourite getFavourite(PersonFavourite personFavourite, Parameters parameters)
{
Favourite fav = new Favourite();
fav.setTargetGuid(personFavourite.getNodeRef().getId());
fav.setCreatedAt(personFavourite.getCreatedAt());
Target target = getTarget(personFavourite, parameters);
fav.setTarget(target);
// REPO-1147 allow retrieving additional properties
// REPO-1147 allow retrieving additional properties
final List<String> paramsInclude = parameters.getInclude();
if (!Collections.disjoint(paramsInclude, ALLOWED_INCLUDES))
{
final List<String> includes = ALLOWED_INCLUDES.stream().filter(a -> paramsInclude.contains(a)).collect(Collectors.toList());
final List<String> includes = ALLOWED_INCLUDES.stream().filter(paramsInclude::contains).collect(Collectors.toList());
// get node representation with only properties included
Node node = nodes.getFolderOrDocument(personFavourite.getNodeRef(), null, null, includes, null);
// Create a map from node properties excluding properties already in this Favorite
Map<String, Object> filteredNodeProperties = filterProps(node.getProperties(), EXCLUDED_PROPS);
if(filteredNodeProperties.size() > 0 && paramsInclude.contains(PARAM_INCLUDE_PROPERTIES))
if (!filteredNodeProperties.isEmpty() && paramsInclude.contains(PARAM_INCLUDE_PROPERTIES))
{
fav.setProperties(filteredNodeProperties);
}
if (paramsInclude.contains(PARAM_INCLUDE_ASPECTNAMES))
{
fav.setAspectNames(node.getAspectNames());
}
final List<String> allowableOperations = node.getAllowableOperations();
if (CollectionUtils.isNotEmpty(allowableOperations) && paramsInclude.contains(PARAM_INCLUDE_ALLOWABLEOPERATIONS))
{
@@ -198,8 +199,8 @@ public class FavouritesImpl implements Favourites
}
}
return fav;
}
return fav;
}
private Map<String, Object> filterProps(Map<String, Object> properties, List<QName> toRemove)
{
@@ -211,34 +212,33 @@ public class FavouritesImpl implements Favourites
private CollectionWithPagingInfo<Favourite> wrap(Paging paging, PagingResults<PersonFavourite> personFavourites, Parameters parameters)
{
final List<PersonFavourite> page = personFavourites.getPage();
final List<Favourite> list = new AbstractList<Favourite>()
{
@Override
public Favourite get(int index)
{
PersonFavourite personFavourite = page.get(index);
Favourite fav = getFavourite(personFavourite, parameters);
return fav;
}
final List<PersonFavourite> page = personFavourites.getPage();
final List<Favourite> list = new AbstractList<>() {
@Override
public Favourite get(int index)
{
PersonFavourite personFavourite = page.get(index);
Favourite fav = getFavourite(personFavourite, parameters);
return fav;
}
@Override
public int size()
{
return page.size();
}
};
Pair<Integer, Integer> pair = personFavourites.getTotalResultCount();
Integer total = null;
if(pair.getFirst().equals(pair.getSecond()))
{
total = pair.getFirst();
}
return CollectionWithPagingInfo.asPaged(paging, list, personFavourites.hasMoreItems(), total);
@Override
public int size()
{
return page.size();
}
};
Pair<Integer, Integer> pair = personFavourites.getTotalResultCount();
Integer total = null;
if (pair.getFirst().equals(pair.getSecond()))
{
total = pair.getFirst();
}
return CollectionWithPagingInfo.asPaged(paging, list, personFavourites.hasMoreItems(), total);
}
@Override
public Favourite addFavourite(String personId, Favourite favourite)
public Favourite addFavourite(String personId, Favourite favourite)
{
Parameters parameters = getDefaultParameters(personId, null);
return addFavourite(personId, favourite, parameters);
@@ -251,13 +251,13 @@ public class FavouritesImpl implements Favourites
personId = people.validatePerson(personId, true);
Target target = favourite.getTarget();
if(target == null)
if (target == null)
{
throw new InvalidArgumentException("target is missing");
}
else if(target instanceof SiteTarget)
else if (target instanceof SiteTarget)
{
SiteTarget siteTarget = (SiteTarget)target;
SiteTarget siteTarget = (SiteTarget) target;
String guid = siteTarget.getSite().getGuid();
SiteInfo siteInfo = sites.validateSite(new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, guid));
NodeRef siteNodeRef = siteInfo.getNodeRef();
@@ -268,16 +268,16 @@ public class FavouritesImpl implements Favourites
PersonFavourite personFavourite = favouritesService.addFavourite(personId, siteNodeRef);
ret = getFavourite(personFavourite, parameters);
}
catch(SiteDoesNotExistException e)
catch (SiteDoesNotExistException e)
{
throw new RelationshipResourceNotFoundException(personId, siteId);
}
}
else if(target instanceof DocumentTarget)
else if (target instanceof DocumentTarget)
{
DocumentTarget documentTarget = (DocumentTarget)target;
DocumentTarget documentTarget = (DocumentTarget) target;
NodeRef nodeRef = documentTarget.getFile().getGuid();
if(!nodes.nodeMatches(nodeRef, Collections.singleton(ContentModel.TYPE_CONTENT), null))
if (!nodes.nodeMatches(nodeRef, Collections.singleton(ContentModel.TYPE_CONTENT), null))
{
throw new RelationshipResourceNotFoundException(personId, nodeRef.getId());
}
@@ -285,11 +285,11 @@ public class FavouritesImpl implements Favourites
PersonFavourite personFavourite = favouritesService.addFavourite(personId, nodeRef);
ret = getFavourite(personFavourite, parameters);
}
else if(target instanceof FolderTarget)
else if (target instanceof FolderTarget)
{
FolderTarget folderTarget = (FolderTarget)target;
FolderTarget folderTarget = (FolderTarget) target;
NodeRef nodeRef = folderTarget.getFolder().getGuid();
if(!nodes.nodeMatches(nodeRef, Collections.singleton(ContentModel.TYPE_FOLDER), Collections.singleton(SiteModel.TYPE_SITE)))
if (!nodes.nodeMatches(nodeRef, Collections.singleton(ContentModel.TYPE_FOLDER), Collections.singleton(SiteModel.TYPE_SITE)))
{
throw new RelationshipResourceNotFoundException(personId, nodeRef.getId());
}
@@ -304,33 +304,33 @@ public class FavouritesImpl implements Favourites
@Override
public void removeFavourite(String personId, String id)
{
personId = people.validatePerson(personId, true);
NodeRef nodeRef = nodes.validateNode(id);
boolean exists = false;
personId = people.validatePerson(personId, true);
NodeRef nodeRef = nodes.validateNode(id);
boolean exists = false;
Type type = favouritesService.getType(nodeRef);
if(type.equals(Type.SITE))
{
SiteInfo siteInfo = siteService.getSite(nodeRef);
if(siteInfo == null)
{
// shouldn't happen because the type implies it's a site
throw new AlfrescoRuntimeException("Unable to find site with nodeRef " + nodeRef);
}
exists = favouritesService.removeFavourite(personId, siteInfo.getNodeRef());
}
else if(type.equals(Type.FILE))
{
exists = favouritesService.removeFavourite(personId, nodeRef);
}
else if(type.equals(Type.FOLDER))
{
exists = favouritesService.removeFavourite(personId, nodeRef);
}
if(!exists)
{
throw new RelationshipResourceNotFoundException(personId, id);
}
Type type = favouritesService.getType(nodeRef);
if (type.equals(Type.SITE))
{
SiteInfo siteInfo = siteService.getSite(nodeRef);
if (siteInfo == null)
{
// shouldn't happen because the type implies it's a site
throw new AlfrescoRuntimeException("Unable to find site with nodeRef " + nodeRef);
}
exists = favouritesService.removeFavourite(personId, siteInfo.getNodeRef());
}
else if (type.equals(Type.FILE))
{
exists = favouritesService.removeFavourite(personId, nodeRef);
}
else if (type.equals(Type.FOLDER))
{
exists = favouritesService.removeFavourite(personId, nodeRef);
}
if (!exists)
{
throw new RelationshipResourceNotFoundException(personId, id);
}
}
@Override
@@ -347,7 +347,7 @@ public class FavouritesImpl implements Favourites
personId = people.validatePerson(personId, true);
PersonFavourite personFavourite = favouritesService.getFavourite(personId, nodeRef);
if(personFavourite != null)
if (personFavourite != null)
{
Favourite favourite = getFavourite(personFavourite, parameters);
return favourite;
@@ -361,52 +361,53 @@ public class FavouritesImpl implements Favourites
@Override
public CollectionWithPagingInfo<Favourite> getFavourites(String personId, final Parameters parameters)
{
personId = people.validatePerson(personId, true);
personId = people.validatePerson(personId, true);
Paging paging = parameters.getPaging();
Paging paging = parameters.getPaging();
List<Pair<FavouritesService.SortFields, Boolean>> sortProps = getSortProps(parameters);
List<Pair<FavouritesService.SortFields, Boolean>> sortProps = getSortProps(parameters);
final Set<Type> filteredByClientQuery = new HashSet<Type>();
Set<Type> filterTypes = FavouritesService.Type.ALL_FILTER_TYPES; //Default all
final Set<Type> filteredByClientQuery = new HashSet<>();
Set<Type> filterTypes = FavouritesService.Type.ALL_FILTER_TYPES; // Default all
// filterType is of the form 'target.<site|file|folder>'
QueryHelper.walk(parameters.getQuery(), new WalkerCallbackAdapter()
{
@Override
public void or() {
//OR is supported but exists() will be called for each EXISTS so we don't
//need to do anything here. If we don't override it then it will be assumed
//that OR in the grammar is not supported.
}
// filterType is of the form 'target.<site|file|folder>'
QueryHelper.walk(parameters.getQuery(), new WalkerCallbackAdapter() {
@Override
public void or()
{
// OR is supported but exists() will be called for each EXISTS so we don't
// need to do anything here. If we don't override it then it will be assumed
// that OR in the grammar is not supported.
}
@Override
public void exists(String filteredByClient, boolean negated) {
if(filteredByClient != null)
@Override
public void exists(String filteredByClient, boolean negated)
{
if (filteredByClient != null)
{
int idx = filteredByClient.lastIndexOf("/");
if(idx == -1 || idx == filteredByClient.length())
int idx = filteredByClient.lastIndexOf('/');
if (idx == -1 || idx == filteredByClient.length())
{
throw new InvalidArgumentException();
}
else
{
String filtertype = filteredByClient.substring(idx + 1).toUpperCase();
String filtertype = filteredByClient.substring(idx + 1).toUpperCase();
filteredByClientQuery.add(Type.valueOf(filtertype));
}
}
}
}
});
if (filteredByClientQuery.size() > 0)
{
filterTypes = filteredByClientQuery;
}
if (!filteredByClientQuery.isEmpty())
{
filterTypes = filteredByClientQuery;
}
final PagingResults<PersonFavourite> favourites = favouritesService.getPagedFavourites(personId, filterTypes, sortProps, Util.getPagingRequest(paging));
return wrap(paging, favourites, parameters);
return wrap(paging, favourites, parameters);
}
private void setPathInfo(Node node, List<String> includeParam)
@@ -419,13 +420,12 @@ public class FavouritesImpl implements Favourites
}
/**
* Returns a {@code {@link Parameters} object where almost all of its values are null.
* the non-null value is the {@literal include} and whatever value is passed for {@code personId} and {@code favouriteId}
* Returns a {@link Parameters} object where almost all of its values are null; the non-null values are the {@literal include} list and whatever is passed for {@code personId} and {@code favouriteId}.
*/
private Parameters getDefaultParameters(String personId, String favouriteId)
{
Params.RecognizedParams recognizedParams = new Params.RecognizedParams(null, null, null, null, Collections.emptyList(), null, null, null,
false);
false);
Parameters parameters = Params.valueOf(recognizedParams, personId, favouriteId, null);
return parameters;
}
@@ -434,7 +434,7 @@ public class FavouritesImpl implements Favourites
{
List<Pair<FavouritesService.SortFields, Boolean>> sortProps = new ArrayList<>();
List<SortColumn> sortCols = parameters.getSorting();
if ((sortCols != null) && (sortCols.size() > 0))
if (sortCols != null && !sortCols.isEmpty())
{
for (SortColumn sortCol : sortCols)
{
@@ -447,7 +447,7 @@ public class FavouritesImpl implements Favourites
{
throw new InvalidArgumentException("Invalid sort field: " + sortCol.column);
}
sortProps.add(new Pair<>(sortField, (sortCol.asc ? Boolean.TRUE : Boolean.FALSE)));
sortProps.add(new Pair<>(sortField, sortCol.asc ? Boolean.TRUE : Boolean.FALSE));
}
}
else

View File

@@ -0,0 +1,119 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.impl;
import java.util.Optional;
import org.springframework.beans.factory.InitializingBean;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsService;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails.STATUS;
import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.SizeDetails;
import org.alfresco.rest.framework.core.exceptions.InvalidNodeTypeException;
import org.alfresco.rest.framework.core.exceptions.NotFoundException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.util.GUID;
import org.alfresco.util.ParameterCheck;
public class SizeDetailsImpl implements SizeDetails, InitializingBean
{
private final Nodes nodes;
private final NodeSizeDetailsService nodeSizeDetailsService;
public SizeDetailsImpl(Nodes nodes, NodeSizeDetailsService nodeSizeDetailsService)
{
this.nodes = nodes;
this.nodeSizeDetailsService = nodeSizeDetailsService;
}
/**
* generateNodeSizeDetailsRequest: responds with HTTP status 202 (Accepted) and the jobId of the size-calculation job.
*/
@Override
public NodeSizeDetails generateNodeSizeDetailsRequest(String nodeId)
{
NodeRef nodeRef = nodes.validateOrLookupNode(nodeId);
validateType(nodeRef);
Optional<NodeSizeDetails> nodeSizeDetails = nodeSizeDetailsService.getSizeDetails(nodeId);
String actionId = nodeSizeDetails.map(NodeSizeDetails::getJobId)
.orElseGet(() -> executeSizeDetails(nodeRef));
return new NodeSizeDetails(actionId);
}
/**
* getNodeSizeDetails: responds with HTTP status 200 and the cached NodeSizeDetails for the given job.
*/
@Override
public NodeSizeDetails getNodeSizeDetails(final String nodeId, final String jobId)
{
NodeRef nodeRef = nodes.validateOrLookupNode(nodeId);
validateType(nodeRef);
Optional<NodeSizeDetails> optionalDetails = nodeSizeDetailsService.getSizeDetails(nodeId);
return optionalDetails.map(details -> {
String cachedJobId = details.getJobId();
if (cachedJobId != null && !jobId.equalsIgnoreCase(cachedJobId))
{
throw new NotFoundException("jobId does not exist");
}
return details;
})
.orElseGet(() -> new NodeSizeDetails(nodeId, STATUS.NOT_INITIATED));
}
/**
* Executes the size-details calculation asynchronously.
*/
private String executeSizeDetails(NodeRef nodeRef)
{
String jobId = GUID.generate();
NodeSizeDetails nodeSizeDetails = new NodeSizeDetails(nodeRef.getId(), jobId, STATUS.PENDING);
nodeSizeDetailsService.putSizeDetails(nodeRef.getId(), nodeSizeDetails);
nodeSizeDetailsService.invokeSizeDetailsExecutor(nodeRef, jobId);
return jobId;
}
private void validateType(NodeRef nodeRef) throws InvalidNodeTypeException
{
if (!nodes.isSubClass(nodeRef, ContentModel.TYPE_FOLDER, false))
{
throw new InvalidNodeTypeException("Node id " + nodeRef.getId() + " does not represent a folder.");
}
}
@Override
public void afterPropertiesSet() throws Exception
{
ParameterCheck.mandatory("nodes", this.nodes);
ParameterCheck.mandatory("nodeSizeDetailsServiceImpl", this.nodeSizeDetailsService);
}
}

View File

@@ -25,6 +25,8 @@
*/
package org.alfresco.rest.api.model;
import java.util.Objects;
/**
* A document target favourite.
*
@@ -33,33 +35,53 @@ package org.alfresco.rest.api.model;
*/
public class DocumentTarget extends Target
{
private Document file;
private Document file;
public DocumentTarget()
{
super();
}
public DocumentTarget()
{
super();
}
public DocumentTarget(Document file)
{
super();
this.file = file;
}
public DocumentTarget(Document file)
{
super();
this.file = file;
}
public void setDocument(Document file)
{
this.file = file;
}
public void setDocument(Document file)
{
this.file = file;
}
public Document getFile()
{
return file;
}
public Document getFile()
{
return file;
}
@Override
public String toString()
{
return "DocumentTarget [file=" + file + "]";
}
@Override
public String toString()
{
return "DocumentTarget [file=" + file + "]";
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
DocumentTarget that = (DocumentTarget) o;
return Objects.equals(file, that.file);
}
@Override
public int hashCode()
{
return Objects.hashCode(file);
}
}

View File

@@ -28,6 +28,7 @@ package org.alfresco.rest.api.model;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.alfresco.rest.framework.resource.UniqueId;
@@ -39,65 +40,105 @@ import org.alfresco.rest.framework.resource.UniqueId;
*/
public class Favourite
{
private String targetGuid;
private Date createdAt;
private Target target;
private Map<String, Object> properties;
private List<String> allowableOperations;
private String targetGuid;
private Date createdAt;
private Target target;
private Map<String, Object> properties;
private List<String> aspectNames;
private List<String> allowableOperations;
public Date getCreatedAt()
{
return createdAt;
}
public Date getCreatedAt()
{
return createdAt;
}
public void setCreatedAt(Date createdAt)
{
this.createdAt = createdAt;
}
public void setCreatedAt(Date createdAt)
{
this.createdAt = createdAt;
}
@UniqueId(name="targetGuid")
public String getTargetGuid()
{
return targetGuid;
}
@UniqueId(name = "targetGuid")
public String getTargetGuid()
{
return targetGuid;
}
public void setTargetGuid(String targetGuid)
{
this.targetGuid = targetGuid;
}
public void setTargetGuid(String targetGuid)
{
this.targetGuid = targetGuid;
}
public Target getTarget()
{
return target;
}
public Target getTarget()
{
return target;
}
public void setTarget(Target target)
{
this.target = target;
}
public void setTarget(Target target)
{
this.target = target;
}
public Map<String, Object> getProperties()
{
return properties;
}
public Map<String, Object> getProperties()
{
return properties;
}
public void setProperties(Map<String, Object> properties)
{
this.properties = properties;
}
public void setProperties(Map<String, Object> properties)
{
this.properties = properties;
}
public List<String> getAllowableOperations() {
return allowableOperations;
}
public List<String> getAspectNames()
{
return aspectNames;
}
public void setAllowableOperations(List<String> allowableOperations) {
this.allowableOperations = allowableOperations;
}
public void setAspectNames(List<String> aspectNames)
{
this.aspectNames = aspectNames;
}
@Override
public String toString()
{
return "Favourite [targetGuid=" + targetGuid
+ ", createdAt=" + createdAt + ", target=" + target + ", properties=" + properties + "]";
}
public List<String> getAllowableOperations()
{
return allowableOperations;
}
public void setAllowableOperations(List<String> allowableOperations)
{
this.allowableOperations = allowableOperations;
}
@Override
public String toString()
{
return "Favourite{" +
"targetGuid='" + targetGuid + '\'' +
", createdAt=" + createdAt +
", target=" + target +
", properties=" + properties +
", aspectNames=" + aspectNames +
", allowableOperations=" + allowableOperations +
'}';
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
Favourite favourite = (Favourite) o;
return Objects.equals(targetGuid, favourite.targetGuid) && Objects.equals(createdAt, favourite.createdAt) && Objects.equals(target, favourite.target) && Objects.equals(properties, favourite.properties) && Objects.equals(aspectNames, favourite.aspectNames) && Objects.equals(allowableOperations, favourite.allowableOperations);
}
@Override
public int hashCode()
{
return Objects.hash(targetGuid, createdAt, target, properties, aspectNames, allowableOperations);
}
}

View File

@@ -0,0 +1,84 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.nodes;
import java.util.List;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.extensions.webscripts.Status;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
import org.alfresco.rest.api.SizeDetails;
import org.alfresco.rest.framework.WebApiDescription;
import org.alfresco.rest.framework.WebApiParam;
import org.alfresco.rest.framework.WebApiParameters;
import org.alfresco.rest.framework.core.ResourceParameter;
import org.alfresco.rest.framework.core.exceptions.RelationshipResourceNotFoundException;
import org.alfresco.rest.framework.resource.RelationshipResource;
import org.alfresco.rest.framework.resource.actions.interfaces.RelationshipResourceAction;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.util.ParameterCheck;
@RelationshipResource(name = "size-details", entityResource = NodesEntityResource.class, title = "Node Size Details")
public class NodeSizeDetailsRelation implements RelationshipResourceAction.ReadById<NodeSizeDetails>,
RelationshipResourceAction.Create<NodeSizeDetails>, InitializingBean
{
private SizeDetails sizeDetails;
public void setSizeDetails(SizeDetails sizeDetails)
{
this.sizeDetails = sizeDetails;
}
@Override
public void afterPropertiesSet()
{
ParameterCheck.mandatory("sizeDetails", this.sizeDetails);
}
@WebApiDescription(title = "Create node-size details request", successStatus = Status.STATUS_ACCEPTED)
@WebApiParam(name = "nodeSizeEntity", title = "Node Size Details Request",
description = "Request for processing Node Size.", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT,
allowMultiple = false)
@Override
public List<NodeSizeDetails> create(String nodeId, List<NodeSizeDetails> nodeSizeEntity, Parameters parameters)
{
return List.of(sizeDetails.generateNodeSizeDetailsRequest(nodeId));
}
@WebApiDescription(title = "Get Node Size Details", description = "Get the Node Size Details")
@WebApiParameters({@WebApiParam(name = "nodeId", title = "The unique id of the Node being addressed",
description = "A single Node id"),
@WebApiParam(name = "jobId", title = "Job Id to get the NodeSizeDetails", description = "JobId")})
@Override
public NodeSizeDetails readById(String nodeId, String jobId, Parameters parameters)
throws RelationshipResourceNotFoundException
{
return sizeDetails.getNodeSizeDetails(nodeId, jobId);
}
}
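
Note: registered as a RelationshipResource named "size-details" under the nodes entity, the class above maps to POST /nodes/{nodeId}/size-details (202 Accepted, body carries the jobId) and GET /nodes/{nodeId}/size-details/{jobId}. A rough HTTP sketch with java.net.http, not part of this changeset; the base URL, credentials, node id and JSON handling are placeholders, and the v1 public-API path prefix is assumed:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class SizeDetailsHttpSketch
{
    // Assumed base path of the v1 public REST API; adjust for the target installation.
    private static final String BASE = "http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1";

    public static void main(String[] args) throws Exception
    {
        HttpClient client = HttpClient.newHttpClient();
        String auth = "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes());
        String nodeId = "some-folder-node-id"; // placeholder

        // Start the asynchronous size calculation: expect 202 Accepted with a jobId in the body.
        HttpRequest create = HttpRequest.newBuilder(URI.create(BASE + "/nodes/" + nodeId + "/size-details"))
                .header("Authorization", auth)
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{}"))
                .build();
        HttpResponse<String> created = client.send(create, HttpResponse.BodyHandlers.ofString());
        System.out.println(created.statusCode() + " " + created.body());

        // Poll the result using the jobId extracted from the response body (JSON parsing omitted).
        String jobId = "job-id-from-response"; // placeholder
        HttpRequest read = HttpRequest.newBuilder(URI.create(BASE + "/nodes/" + nodeId + "/size-details/" + jobId))
                .header("Authorization", auth)
                .GET()
                .build();
        System.out.println(client.send(read, HttpResponse.BodyHandlers.ofString()).body());
    }
}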

View File

@@ -996,6 +996,25 @@
</property>
</bean>
<bean id="sizeDetailsImpl" class="org.alfresco.rest.api.impl.SizeDetailsImpl">
<constructor-arg name="nodes" ref="nodes" />
<constructor-arg name="nodeSizeDetailsService" ref="NodeSizeDetailsService" />
</bean>
<bean id="sizeDetails" class="org.springframework.aop.framework.ProxyFactoryBean">
<property name="proxyInterfaces">
<value>org.alfresco.rest.api.SizeDetails</value>
</property>
<property name="target">
<ref bean="sizeDetailsImpl" />
</property>
<property name="interceptorNames">
<list>
<idref bean="legacyExceptionInterceptor" />
</list>
</property>
</bean>
<bean class="org.alfresco.rest.api.rules.NodeRuleSettingsRelation">
<property name="ruleSettings" ref="RuleSettings" />
</bean>
@@ -1770,4 +1789,8 @@
</list>
</property>
</bean>
<bean class="org.alfresco.rest.api.nodes.NodeSizeDetailsRelation">
<property name="sizeDetails" ref="sizeDetails" />
</bean>
</beans>
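
Note: the sizeDetails bean above follows the pattern used for the other v1 API facades — the implementation is wrapped in a Spring AOP proxy so legacyExceptionInterceptor can translate repository exceptions into REST error responses. A programmatic equivalent, shown only to illustrate the shape of that wiring (ProxyFactory in place of ProxyFactoryBean; the interceptor argument is a stand-in):

import org.aopalliance.intercept.MethodInterceptor;
import org.springframework.aop.framework.ProxyFactory;

import org.alfresco.rest.api.SizeDetails;

// Illustrative only: what the <bean id="sizeDetails"> proxy definition does, expressed in code.
public class SizeDetailsProxySketch
{
    public static SizeDetails proxy(SizeDetails target, MethodInterceptor exceptionInterceptor)
    {
        ProxyFactory factory = new ProxyFactory();
        factory.setTarget(target);               // the sizeDetailsImpl bean
        factory.addInterface(SizeDetails.class); // proxyInterfaces
        factory.addAdvice(exceptionInterceptor); // interceptorNames -> legacyExceptionInterceptor
        return (SizeDetails) factory.getProxy();
    }
}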

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -25,60 +25,60 @@
*/
package org.alfresco;
import org.alfresco.repo.web.scripts.TestWebScriptRepoServer;
import org.alfresco.util.testing.category.DBTests;
import org.alfresco.util.testing.category.NonBuildTests;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.alfresco.repo.web.scripts.TestWebScriptRepoServer;
import org.alfresco.util.testing.category.DBTests;
import org.alfresco.util.testing.category.NonBuildTests;
@RunWith(Categories.class)
@Categories.ExcludeCategory({DBTests.class, NonBuildTests.class})
@Suite.SuiteClasses({
// [classpath:alfresco/application-context.xml, classpath:alfresco/web-scripts-application-context-test.xml,
// classpath:alfresco/web-scripts-application-context.xml]
org.alfresco.repo.web.scripts.quickshare.QuickShareRestApiTest.class,
org.alfresco.repo.web.scripts.admin.AdminWebScriptTest.class,
org.alfresco.repo.web.scripts.audit.AuditWebScriptTest.class,
org.alfresco.repo.web.scripts.blogs.BlogServiceTest.class,
org.alfresco.repo.web.scripts.dictionary.DictionaryRestApiTest.class,
org.alfresco.repo.web.scripts.discussion.DiscussionRestApiTest.class,
org.alfresco.repo.web.scripts.activities.feed.control.FeedControlTest.class,
org.alfresco.repo.web.scripts.forms.FormRestApiGet_Test.class,
org.alfresco.repo.web.scripts.forms.FormRestApiJsonPost_Test.class,
org.alfresco.repo.web.scripts.groups.GroupsTest.class,
org.alfresco.repo.web.scripts.invitation.InvitationWebScriptTest.class,
org.alfresco.repo.web.scripts.invite.InviteServiceTest.class,
org.alfresco.repo.web.scripts.LoginTest.class,
org.alfresco.repo.web.scripts.search.PersonSearchTest.class,
org.alfresco.repo.web.scripts.person.PersonServiceTest.class,
org.alfresco.repo.web.scripts.preference.PreferenceServiceTest.class,
org.alfresco.repo.web.scripts.rating.RatingRestApiTest.class,
org.alfresco.repo.web.scripts.replication.ReplicationRestApiTest.class,
org.alfresco.repo.web.scripts.RepositoryContainerTest.class,
org.alfresco.repo.web.scripts.rule.RuleServiceTest.class,
org.alfresco.repo.web.scripts.action.RunningActionRestApiTest.class,
org.alfresco.repo.web.scripts.site.SiteServiceTest.class,
org.alfresco.repo.web.scripts.tagging.TaggingServiceTest.class,
org.alfresco.repo.web.scripts.thumbnail.ThumbnailServiceTest.class,
org.alfresco.repo.web.scripts.transfer.TransferWebScriptTest.class,
org.alfresco.repo.web.scripts.workflow.ActivitiWorkflowRestApiTest.class,
org.alfresco.repo.web.scripts.solr.SOLRWebScriptTest.class,
org.alfresco.repo.web.scripts.subscriptions.SubscriptionServiceRestApiTest.class,
org.alfresco.repo.web.scripts.facet.FacetRestApiTest.class,
org.alfresco.repo.web.scripts.comment.CommentsApiTest.class,
org.alfresco.repo.web.scripts.content.ContentGetTest.class,
org.alfresco.repo.web.scripts.XssVulnerabilityTest.class,
org.alfresco.repo.web.scripts.links.LinksRestApiTest.class,
org.alfresco.repo.model.filefolder.RemoteFileFolderLoaderTest.class,
org.alfresco.repo.web.scripts.ReadOnlyTransactionInGetRestApiTest.class,
org.alfresco.repo.web.scripts.custommodel.CustomModelImportTest.class,
org.alfresco.repo.web.scripts.site.SurfConfigTest.class,
org.alfresco.repo.web.scripts.node.NodeWebScripTest.class,
org.alfresco.rest.api.impl.CommentsImplUnitTest.class,
org.alfresco.rest.api.impl.DownloadsImplCheckArchiveStatusUnitTest.class,
org.alfresco.rest.api.impl.RestApiDirectUrlConfigUnitTest.class
})
// [classpath:alfresco/application-context.xml, classpath:alfresco/web-scripts-application-context-test.xml,
// classpath:alfresco/web-scripts-application-context.xml]
org.alfresco.repo.web.scripts.quickshare.QuickShareRestApiTest.class,
org.alfresco.repo.web.scripts.admin.AdminWebScriptTest.class,
org.alfresco.repo.web.scripts.audit.AuditWebScriptTest.class,
org.alfresco.repo.web.scripts.blogs.BlogServiceTest.class,
org.alfresco.repo.web.scripts.dictionary.DictionaryRestApiTest.class,
org.alfresco.repo.web.scripts.discussion.DiscussionRestApiTest.class,
org.alfresco.repo.web.scripts.activities.feed.control.FeedControlTest.class,
org.alfresco.repo.web.scripts.forms.FormRestApiGet_Test.class,
org.alfresco.repo.web.scripts.forms.FormRestApiJsonPost_Test.class,
org.alfresco.repo.web.scripts.groups.GroupsTest.class,
org.alfresco.repo.web.scripts.invitation.InvitationWebScriptTest.class,
org.alfresco.repo.web.scripts.invite.InviteServiceTest.class, org.alfresco.repo.web.scripts.LoginTest.class,
org.alfresco.repo.web.scripts.search.PersonSearchTest.class,
org.alfresco.repo.web.scripts.person.PersonServiceTest.class,
org.alfresco.repo.web.scripts.preference.PreferenceServiceTest.class,
org.alfresco.repo.web.scripts.rating.RatingRestApiTest.class,
org.alfresco.repo.web.scripts.replication.ReplicationRestApiTest.class,
org.alfresco.repo.web.scripts.RepositoryContainerTest.class,
org.alfresco.repo.web.scripts.rule.RuleServiceTest.class,
org.alfresco.repo.web.scripts.action.RunningActionRestApiTest.class,
org.alfresco.repo.web.scripts.site.SiteServiceTest.class,
org.alfresco.repo.web.scripts.tagging.TaggingServiceTest.class,
org.alfresco.repo.web.scripts.thumbnail.ThumbnailServiceTest.class,
org.alfresco.repo.web.scripts.transfer.TransferWebScriptTest.class,
org.alfresco.repo.web.scripts.workflow.ActivitiWorkflowRestApiTest.class,
org.alfresco.repo.web.scripts.solr.SOLRWebScriptTest.class,
org.alfresco.repo.web.scripts.subscriptions.SubscriptionServiceRestApiTest.class,
org.alfresco.repo.web.scripts.facet.FacetRestApiTest.class,
org.alfresco.repo.web.scripts.comment.CommentsApiTest.class,
org.alfresco.repo.web.scripts.content.ContentGetTest.class,
org.alfresco.repo.web.scripts.XssVulnerabilityTest.class,
org.alfresco.repo.web.scripts.links.LinksRestApiTest.class,
org.alfresco.repo.model.filefolder.RemoteFileFolderLoaderTest.class,
org.alfresco.repo.web.scripts.ReadOnlyTransactionInGetRestApiTest.class,
org.alfresco.repo.web.scripts.custommodel.CustomModelImportTest.class,
org.alfresco.repo.web.scripts.site.SurfConfigTest.class,
org.alfresco.repo.web.scripts.node.NodeWebScripTest.class,
org.alfresco.rest.api.impl.CommentsImplUnitTest.class,
org.alfresco.rest.api.impl.DownloadsImplCheckArchiveStatusUnitTest.class,
org.alfresco.rest.api.impl.RestApiDirectUrlConfigUnitTest.class,
org.alfresco.rest.api.impl.SizeDetailsImplTest.class})
public class AppContext04TestSuite
{
public AppContext04TestSuite()

View File

@@ -0,0 +1,134 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.impl;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.openMocks;
import static org.alfresco.model.ContentModel.TYPE_CONTENT;
import static org.alfresco.service.cmr.favourites.FavouritesService.Type.FILE;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.alfresco.repo.favourites.PersonFavourite;
import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.People;
import org.alfresco.rest.api.model.Document;
import org.alfresco.rest.api.model.DocumentTarget;
import org.alfresco.rest.api.model.Favourite;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.service.cmr.favourites.FavouritesService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.NamespaceService;
/**
* Unit tests for {@link FavouritesImpl} class.
*/
public class FavouritesImplUnitTest
{
static final String NODE_ID = "12345678";
static final NodeRef NODE_REF = new NodeRef("favourite://node/" + NODE_ID);
static final String PERSON_ID = "personId";
static final String ASPECT_NAME = "some:aspect";
@InjectMocks
FavouritesImpl favouritesImpl;
@Mock
People people;
@Mock
Nodes nodes;
@Mock
FavouritesService favouritesService;
@Mock
NamespaceService namespaceService;
@Mock
Favourite favourite;
@Mock
Document document;
@Mock
PersonFavourite personFavourite;
@Before
public void setUp()
{
openMocks(this);
when(nodes.getDocument(NODE_REF)).thenReturn(document);
when(nodes.nodeMatches(NODE_REF, singleton(TYPE_CONTENT), null)).thenReturn(true);
when(document.getGuid()).thenReturn(NODE_REF);
when(people.validatePerson(PERSON_ID, true)).thenReturn(PERSON_ID);
when(personFavourite.getNodeRef()).thenReturn(NODE_REF);
when(personFavourite.getType()).thenReturn(FILE);
when(favouritesService.addFavourite(PERSON_ID, NODE_REF)).thenReturn(personFavourite);
when(namespaceService.getPrefixes(anyString())).thenReturn(List.of("prefix"));
}
@Test
public void testAddFavourite()
{
DocumentTarget documentTarget = new DocumentTarget(document);
when(favourite.getTarget()).thenReturn(documentTarget);
Favourite response = favouritesImpl.addFavourite(PERSON_ID, favourite);
Favourite expected = new Favourite();
expected.setTarget(documentTarget);
expected.setTargetGuid(NODE_ID);
assertEquals(expected, response);
}
@Test
public void testAddFavouriteIncludeAspectNames()
{
List<String> includes = List.of("aspectNames");
DocumentTarget documentTarget = new DocumentTarget(document);
when(favourite.getTarget()).thenReturn(documentTarget);
when(nodes.getFolderOrDocument(NODE_REF, null, null, includes, null)).thenReturn(document);
when(document.getAspectNames()).thenReturn(List.of(ASPECT_NAME));
Parameters parameters = mock(Parameters.class);
when(parameters.getInclude()).thenReturn(includes);
Favourite response = favouritesImpl.addFavourite(PERSON_ID, favourite, parameters);
Favourite expected = new Favourite();
expected.setTarget(documentTarget);
expected.setTargetGuid(NODE_ID);
expected.setAspectNames(List.of(ASPECT_NAME));
assertEquals(expected, response);
}
}

View File

@@ -0,0 +1,107 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.impl;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.Serializable;
import java.util.Optional;
import java.util.concurrent.ThreadPoolExecutor;
import org.junit.Before;
import org.junit.Test;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.model.Node;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
/**
* Unit tests for {@link SizeDetailsImpl} class.
*/
public class SizeDetailsImplTest
{
private static final String NAMESPACE = "http://www.alfresco.org/test/NodeSizeDetailsTest";
private static final QName TYPE_FOLDER = QName.createQName(NAMESPACE, "folder");
private SizeDetailsImpl sizeDetailsImpl;
private Nodes nodes;
private NodeSizeDetailsServiceImpl nodeSizeDetailsServiceImpl;
private NodeSizeDetails nodeSizeDetails;
@Before
public void setUp() throws Exception
{
nodes = mock(Nodes.class);
SearchService searchService = mock(SearchService.class);
nodeSizeDetailsServiceImpl = mock(NodeSizeDetailsServiceImpl.class);
ThreadPoolExecutor threadPoolExecutor = mock(ThreadPoolExecutor.class);
SimpleCache<Serializable, NodeSizeDetails> simpleCache = mock(SimpleCache.class);
nodeSizeDetails = mock(NodeSizeDetails.class);
nodeSizeDetailsServiceImpl.setSearchService(searchService);
nodeSizeDetailsServiceImpl.setDefaultItems(1000);
nodeSizeDetailsServiceImpl.setSimpleCache(simpleCache);
verify(nodeSizeDetailsServiceImpl).setSimpleCache(simpleCache);
nodeSizeDetailsServiceImpl.setThreadPoolExecutor(threadPoolExecutor);
sizeDetailsImpl = new SizeDetailsImpl(nodes, nodeSizeDetailsServiceImpl);
}
@Test
public void calculateNodeSizeDetails()
{
String nodeName = "folderNode";
String nodeId = "node-id";
String jobId = "job-id";
NodeRef nodeRef = new NodeRef("protocol", "identifier", nodeId);
Node node = new Node();
node.setIsFolder(true);
node.setNodeRef(nodeRef);
node.setName(nodeName);
node.setNodeType(TYPE_FOLDER.getLocalName());
node.setNodeId(nodeRef.getId());
when(nodes.validateOrLookupNode(nodeId)).thenReturn(nodeRef);
when(nodes.isSubClass(nodeRef, ContentModel.TYPE_FOLDER, false)).thenReturn(true);
when(nodeSizeDetailsServiceImpl.getSizeDetails(nodeId)).thenReturn(Optional.ofNullable(nodeSizeDetails));
NodeSizeDetails requestSizeDetails = sizeDetailsImpl.generateNodeSizeDetailsRequest(nodeId);
assertNotNull("After executing POST/size-details, it will provide with 202 status code", requestSizeDetails);
NodeSizeDetails nodeSizeDetails = sizeDetailsImpl.getNodeSizeDetails(nodeId, jobId);
assertNotNull("After executing GET/size-details, it will provide with 200 status code", nodeSizeDetails);
}
}

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -25,27 +25,45 @@
*/
package org.alfresco.rest.api.tests;
import org.alfresco.repo.content.directurl.SystemWideDirectUrlConfig;
import org.alfresco.rest.api.impl.directurl.RestApiDirectUrlConfig;
import org.alfresco.rest.api.tests.client.PublicApiHttpClient;
import static org.alfresco.rest.api.tests.util.RestApiUtil.toJsonAsString;
import static org.alfresco.rest.api.tests.util.RestApiUtil.toJsonAsStringNonNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.alfresco.rest.api.tests.util.RestApiUtil.toJsonAsString;
import static org.alfresco.rest.api.tests.util.RestApiUtil.toJsonAsStringNonNull;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.simple.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
import org.springframework.util.ResourceUtils;
import org.alfresco.repo.content.directurl.SystemWideDirectUrlConfig;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.repo.tenant.TenantUtil;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.impl.directurl.RestApiDirectUrlConfig;
import org.alfresco.rest.api.model.Site;
import org.alfresco.rest.api.nodes.NodesEntityResource;
import org.alfresco.rest.api.tests.RepoService.TestNetwork;
import org.alfresco.rest.api.tests.client.HttpResponse;
import org.alfresco.rest.api.tests.client.PublicApiClient;
import org.alfresco.rest.api.tests.client.PublicApiHttpClient;
import org.alfresco.rest.api.tests.client.PublicApiHttpClient.BinaryPayload;
import org.alfresco.rest.api.tests.client.PublicApiHttpClient.RequestBuilder;
import org.alfresco.rest.api.tests.client.RequestContext;
@@ -65,22 +83,6 @@ import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.site.SiteVisibility;
import org.alfresco.util.TempFileProvider;
import org.alfresco.util.testing.category.LuceneTests;
import org.json.simple.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
import org.springframework.util.ResourceUtils;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
/**
* Generic methods for calling the Api (originally taken and adapted from BaseCustomModelApiTest)
@@ -94,53 +96,39 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
{
public static final String LAST_MODIFIED_HEADER = "Last-Modified";
public static final String IF_MODIFIED_SINCE_HEADER = "If-Modified-Since";
private static final String RESOURCE_PREFIX = "publicapi/upload/";
protected static final String URL_NODES = "nodes";
protected static final String URL_DELETED_NODES = "deleted-nodes";
protected static final String URL_RENDITIONS = "renditions";
protected static final String URL_VERSIONS = "versions";
private static final String URL_CHILDREN = "children";
private static final String URL_CONTENT = "content";
protected static final String TYPE_CM_FOLDER = "cm:folder";
protected static final String TYPE_CM_CONTENT = "cm:content";
protected static final String TYPE_CM_OBJECT = "cm:cmobject";
protected static final String ASPECT_CM_PREFERENCES = "cm:preferences";
protected static final String ASSOC_TYPE_CM_PREFERENCE_IMAGE = "cm:preferenceImage";
protected static final String ASSOC_TYPE_CM_CONTAINS = "cm:contains";
// TODO improve admin-related tests, including ability to override default admin un/pw
protected static final String DEFAULT_ADMIN = "admin";
protected static final String DEFAULT_ADMIN_PWD = "admin";
protected static final long PAUSE_TIME = 5000; // millisecond
protected static final int MAX_RETRY = 20;
private static final String RESOURCE_PREFIX = "publicapi/upload/";
private static final String URL_CHILDREN = "children";
private static final String URL_CONTENT = "content";
private static final String URL_CALCULATEFOLDERSIZE = "size-details";
private static final String REQUEST_DIRECT_ACCESS_URL = "request-direct-access-url";
// network1 with user1, user2 and a testsite1
protected static TestNetwork networkOne;
protected static String user1; // user1 from network1
protected static String user2; // user2 from network1
// network admin (or default super admin, if not running within a tenant/network)
protected static String networkAdmin = DEFAULT_ADMIN;
protected static String tSiteId;
protected static String tDocLibNodeId;
protected static List<String> users = new ArrayList<>();
protected static JacksonUtil jacksonUtil;
protected static MutableAuthenticationService authenticationService;
protected static PersonService personService;
protected final String RUNID = System.currentTimeMillis()+"";
private static final String REQUEST_DIRECT_ACCESS_URL = "request-direct-access-url";
protected final String RUNID = System.currentTimeMillis() + "";
@Override
@Before
@@ -154,14 +142,14 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
networkOne = getTestFixture().getRandomNetwork();
}
//userOneN1 = networkN1.createUser();
//userTwoN1 = networkN1.createUser();
// userOneN1 = networkN1.createUser();
// userTwoN1 = networkN1.createUser();
String tenantDomain = networkOne.getId();
if (! TenantService.DEFAULT_DOMAIN.equals(tenantDomain))
if (!TenantService.DEFAULT_DOMAIN.equals(tenantDomain))
{
networkAdmin = DEFAULT_ADMIN+"@"+tenantDomain;
networkAdmin = DEFAULT_ADMIN + "@" + tenantDomain;
}
// to enable admin access via test calls - eg. via PublicApiClient -> AbstractTestApi -> findUserByUserName
@@ -200,8 +188,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
for (final String username : users)
{
transactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
transactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable
{
@@ -233,7 +220,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected String getRequestArchivedRenditonContentDirectUrl(String nodeId, String renditionID)
{
return URL_DELETED_NODES + "/" + nodeId + "/" + URL_RENDITIONS + "/" + renditionID + "/" + REQUEST_DIRECT_ACCESS_URL;
return URL_DELETED_NODES + "/" + nodeId + "/" + URL_RENDITIONS + "/" + renditionID + "/"
+ REQUEST_DIRECT_ACCESS_URL;
}
protected String getRequestRenditionDirectAccessUrl(String nodeId, String renditionID)
@@ -246,7 +234,6 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return URL_NODES + "/" + nodeId + "/" + URL_VERSIONS + "/" + versionId + "/" + REQUEST_DIRECT_ACCESS_URL;
}
/**
* The API scope: either public or private
*
@@ -262,10 +249,10 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse post(String url, byte[] body, Map<String, String> params, Map<String, String> headers, String apiName, String contentType, int expectedStatus) throws Exception
protected HttpResponse post(String url, byte[] body, Map<String, String> params, Map<String, String> headers,
String apiName, String contentType, int expectedStatus) throws Exception
{
RequestBuilder requestBuilder = httpClient.new PostRequestBuilder()
.setBodyAsByteArray(body)
RequestBuilder requestBuilder = httpClient.new PostRequestBuilder().setBodyAsByteArray(body)
.setContentType(contentType)
.setRequestContext(publicApiClient.getRequestContext())
.setScope(getScope())
@@ -279,10 +266,10 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse post(String url, String body, Map<String, String> params, Map<String, String> headers, String apiName, int expectedStatus) throws Exception
protected HttpResponse post(String url, String body, Map<String, String> params, Map<String, String> headers,
String apiName, int expectedStatus) throws Exception
{
RequestBuilder requestBuilder = httpClient.new PostRequestBuilder()
.setBodyAsString(body)
RequestBuilder requestBuilder = httpClient.new PostRequestBuilder().setBodyAsString(body)
.setRequestContext(publicApiClient.getRequestContext())
.setScope(getScope())
.setApiName(apiName)
@@ -307,7 +294,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse post(String url, String body, String queryString, String contentType, int expectedStatus) throws Exception
protected HttpResponse post(String url, String body, String queryString, String contentType, int expectedStatus)
throws Exception
{
if (queryString != null)
{
@@ -319,7 +307,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse post(String url, byte[] body, String queryString, String contentType, int expectedStatus) throws Exception
protected HttpResponse post(String url, byte[] body, String queryString, String contentType, int expectedStatus)
throws Exception
{
if (queryString != null)
{
@@ -332,9 +321,12 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
}
// TODO unused queryString - fix-up usages and then remove
protected HttpResponse post(String entityCollectionName, String entityId, String relationCollectionName, byte[] body, String queryString, String contentType, int expectedStatus) throws Exception
protected HttpResponse post(String entityCollectionName, String entityId, String relationCollectionName,
byte[] body, String queryString, String contentType, int expectedStatus)
throws Exception
{
HttpResponse response = publicApiClient.post(getScope(), entityCollectionName, entityId, relationCollectionName, null, body, contentType);
HttpResponse response = publicApiClient.post(getScope(), entityCollectionName, entityId, relationCollectionName, null, body,
contentType);
checkStatus(expectedStatus, response.getStatusCode());
return response;
@@ -345,7 +337,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return getAll(url, paging, null, expectedStatus);
}
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams, int expectedStatus) throws Exception
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams,
int expectedStatus) throws Exception
{
Map<String, String> params = createParams(paging, otherParams);
@@ -355,7 +348,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse getAll(Class<?> entityResource, PublicApiClient.Paging paging, Map<String, String> otherParams, int expectedStatus) throws Exception
protected HttpResponse getAll(Class<?> entityResource, PublicApiClient.Paging paging,
Map<String, String> otherParams, int expectedStatus) throws Exception
{
HttpResponse response = publicApiClient.get(entityResource, null, null, otherParams);
checkStatus(expectedStatus, response.getStatusCode());
@@ -363,16 +357,17 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams, Map<String, String> headers, int expectedStatus) throws Exception
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams,
Map<String, String> headers, int expectedStatus) throws Exception
{
return getAll(url, paging, otherParams, headers, null, expectedStatus);
}
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams, Map<String, String> headers, String apiName, int expectedStatus) throws Exception
protected HttpResponse getAll(String url, PublicApiClient.Paging paging, Map<String, String> otherParams,
Map<String, String> headers, String apiName, int expectedStatus) throws Exception
{
Map<String, String> params = createParams(paging, otherParams);
RequestBuilder requestBuilder = httpClient.new GetRequestBuilder()
.setRequestContext(publicApiClient.getRequestContext())
RequestBuilder requestBuilder = httpClient.new GetRequestBuilder().setRequestContext(publicApiClient.getRequestContext())
.setScope(getScope())
.setApiName(apiName)
.setEntityCollectionName(url)
@@ -398,7 +393,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params, int expectedStatus) throws Exception
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params, int expectedStatus)
throws Exception
{
HttpResponse response = publicApiClient.get(getScope(), url, entityId, null, null, params);
checkStatus(expectedStatus, response.getStatusCode());
@@ -406,7 +402,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse getSingle(Class<?> entityResource, String entityId, Map<String, String> params, int expectedStatus) throws Exception
protected HttpResponse getSingle(Class<?> entityResource, String entityId, Map<String, String> params,
int expectedStatus) throws Exception
{
HttpResponse response = publicApiClient.get(entityResource, entityId, null, params);
checkStatus(expectedStatus, response.getStatusCode());
@@ -414,15 +411,16 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params, Map<String, String> headers, int expectedStatus) throws Exception
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params,
Map<String, String> headers, int expectedStatus) throws Exception
{
return getSingle(url, entityId, params, headers, null, expectedStatus);
}
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params, Map<String, String> headers, String apiName, int expectedStatus) throws Exception
protected HttpResponse getSingle(String url, String entityId, Map<String, String> params,
Map<String, String> headers, String apiName, int expectedStatus) throws Exception
{
RequestBuilder requestBuilder = httpClient.new GetRequestBuilder()
.setRequestContext(publicApiClient.getRequestContext())
RequestBuilder requestBuilder = httpClient.new GetRequestBuilder().setRequestContext(publicApiClient.getRequestContext())
.setScope(getScope())
.setApiName(apiName)
.setEntityCollectionName(url)
@@ -437,7 +435,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
}
protected HttpResponse getSingleWithDelayRetry(String url, String entityId, Map<String, String> params,
Map<String, String> headers, int repeat, long pauseInMillisecond, int expectedStatus) throws Exception
Map<String, String> headers, int repeat, long pauseInMillisecond,
int expectedStatus) throws Exception
{
int retryCount = 0;
while (retryCount < repeat)
@@ -455,7 +454,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return null;
}
protected HttpResponse put(String url, String entityId, String body, String queryString, int expectedStatus) throws Exception
protected HttpResponse put(String url, String entityId, String body, String queryString, int expectedStatus)
throws Exception
{
if (queryString != null)
{
@@ -467,8 +467,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse putBinary(String url, int version, BinaryPayload payload, String queryString, Map<String, String> params,
int expectedStatus) throws Exception
protected HttpResponse putBinary(String url, int version, BinaryPayload payload, String queryString,
Map<String, String> params, int expectedStatus) throws Exception
{
if (queryString != null)
{
@@ -482,7 +482,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
}
protected HttpResponse putBinary(String url, BinaryPayload payload, String queryString, Map<String, String> params,
int expectedStatus) throws Exception
int expectedStatus) throws Exception
{
return putBinary(url, 1, payload, queryString, params, expectedStatus);
}
@@ -492,7 +492,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return delete(url, entityId, null, expectedStatus);
}
protected HttpResponse delete(String url, String entityId, Map<String, String> params, int expectedStatus) throws Exception
protected HttpResponse delete(String url, String entityId, Map<String, String> params, int expectedStatus)
throws Exception
{
HttpResponse response = publicApiClient.delete(getScope(), 1, url, entityId, null, null, params);
checkStatus(expectedStatus, response.getStatusCode());
@@ -500,10 +501,10 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return response;
}
protected HttpResponse delete(String url, String entityId, Map<String, String> params, Map<String, String> headers, String apiName, int expectedStatus) throws Exception
protected HttpResponse delete(String url, String entityId, Map<String, String> params, Map<String, String> headers,
String apiName, int expectedStatus) throws Exception
{
RequestBuilder requestBuilder = httpClient.new DeleteRequestBuilder()
.setRequestContext(publicApiClient.getRequestContext())
RequestBuilder requestBuilder = httpClient.new DeleteRequestBuilder().setRequestContext(publicApiClient.getRequestContext())
.setScope(getScope())
.setApiName(apiName)
.setEntityCollectionName(url)
@@ -524,7 +525,10 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected String createUser(String usernameIn, String password, TestNetwork network)
{
return createUser(new PersonInfo(usernameIn, usernameIn, usernameIn, password, null, null, null, null, null, null, null), network);
return createUser(
new PersonInfo(usernameIn, usernameIn, usernameIn, password, null, null, null, null, null, null,
null),
network);
}
/**
@@ -534,16 +538,15 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
{
final String tenantDomain = (network != null ? network.getId() : TenantService.DEFAULT_DOMAIN);
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>()
{
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>() {
@Override
public String doWork() throws Exception
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>()
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>() {
public String doWork() throws Exception
{
String username = repoService.getPublicApiContext().createUserName(personInfo.getUsername(), tenantDomain);
String username = repoService.getPublicApiContext()
.createUserName(personInfo.getUsername(), tenantDomain);
personInfo.setUsername(username);
RepoService.TestPerson person = repoService.createUser(personInfo, username, network);
return person.getId();
@@ -561,17 +564,17 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
{
final String tenantDomain = (network != null ? network.getId() : TenantService.DEFAULT_DOMAIN);
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>()
{
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>() {
@Override
public String doWork() throws Exception
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>()
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>() {
public String doWork() throws Exception
{
String username = repoService.getPublicApiContext().createUserName(usernameIn, tenantDomain);
PersonInfo personInfo = new PersonInfo(username, username, username, password, null, null, null, null, null, null, null);
String username = repoService.getPublicApiContext()
.createUserName(usernameIn, tenantDomain);
PersonInfo personInfo = new PersonInfo(username, username, username, password, null, null, null, null, null,
null, null);
RepoService.TestPerson person = repoService.getOrCreateUser(personInfo, username, network);
return person.getId();
}
@@ -587,13 +590,11 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
{
final String tenantDomain = (network != null ? network.getId() : TenantService.DEFAULT_DOMAIN);
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>()
{
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<String>() {
@Override
public String doWork() throws Exception
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>()
{
return TenantUtil.runAsTenant(new TenantUtil.TenantRunAsWork<String>() {
public String doWork() throws Exception
{
repoService.deleteUser(username, network);
@@ -607,9 +608,11 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected SiteMember addSiteMember(String siteId, String userId, final SiteRole siteRole) throws Exception
{
SiteMember siteMember = new SiteMember(userId, siteRole.name());
HttpResponse response = publicApiClient.post(getScope(), "sites", siteId, "members", null, siteMember.toJSON().toString());
HttpResponse response = publicApiClient.post(getScope(), "sites", siteId, "members", null, siteMember.toJSON()
.toString());
checkStatus(201, response.getStatusCode());
return SiteMember.parseSiteMember(siteMember.getSiteId(), (JSONObject)response.getJsonResponse().get("entry"));
return SiteMember.parseSiteMember(siteMember.getSiteId(), (JSONObject) response.getJsonResponse()
.get("entry"));
}
protected Site createSite(String siteTitle, SiteVisibility siteVisibility) throws Exception
@@ -617,7 +620,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return createSite(null, siteTitle, null, siteVisibility, 201);
}
protected Site createSite(String siteId, String siteTitle, String siteDescription, SiteVisibility siteVisibility, int expectedStatus) throws Exception
protected Site createSite(String siteId, String siteTitle, String siteDescription, SiteVisibility siteVisibility,
int expectedStatus) throws Exception
{
Site site = new Site();
site.setId(siteId);
@@ -633,7 +637,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected HttpResponse deleteSite(String siteId, boolean permanent, int expectedStatus) throws Exception
{
Map params = null;
if (permanent == true)
if (permanent)
{
params = Collections.singletonMap("permanent", "true");
}
@@ -672,9 +676,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
}
/**
* @deprecated
*
* @param runAsUser
* @deprecated
*/
protected void setRequestContext(String runAsUser)
{
@@ -702,7 +705,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
}
else if ((runAsUser != null) && runAsUser.equals(DEFAULT_ADMIN))
{
runAsUser = runAsUser+"@"+runAsNetwork;
runAsUser = runAsUser + "@" + runAsNetwork;
}
publicApiClient.setRequestContext(new RequestContext(runAsNetwork, runAsUser, password));
@@ -750,17 +753,18 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected String createUniqueContent(String folderId) throws Exception
{
Document documentResp = createTextFile(folderId, "file-" + System.currentTimeMillis(),
"some text-" + System.currentTimeMillis(), "UTF-8", null);
"some text-" + System.currentTimeMillis(), "UTF-8", null);
return documentResp.getId();
}
protected Node createNode(String parentId, String nodeName, String nodeType, Map<String, Object> props) throws Exception
protected Node createNode(String parentId, String nodeName, String nodeType, Map<String, Object> props)
throws Exception
{
return createNode(parentId, nodeName, nodeType, props, Node.class);
}
protected <T> T createNode(String parentId, String nodeName, String nodeType, Map<String, Object> props, Class<T> returnType)
throws Exception
protected <T> T createNode(String parentId, String nodeName, String nodeType, Map<String, Object> props,
Class<T> returnType) throws Exception
{
Node n = new Node();
n.setName(nodeName);
@@ -786,7 +790,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected void deleteNode(String nodeId, boolean permanent, int expectedStatus) throws Exception
{
Map params = null;
if (permanent == true)
if (permanent)
{
params = Collections.singletonMap("permanent", "true");
}
@@ -794,17 +798,19 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
delete(URL_NODES, nodeId, params, expectedStatus);
}
protected Document createTextFile(String parentId, String fileName, String textContent) throws IOException, Exception
protected Document createTextFile(String parentId, String fileName, String textContent) throws Exception
{
return createTextFile(parentId, fileName, textContent, "UTF-8", null);
}
protected Document createTextFile(String parentId, String fileName, String textContent, String encoding, Map<String, String> props) throws IOException, Exception
protected Document createTextFile(String parentId, String fileName, String textContent, String encoding,
Map<String, String> props) throws Exception
{
return createTextFile(parentId, fileName, textContent, encoding, props, 201);
}
protected Document createTextFile(String parentId, String fileName, String textContent, String encoding, Map<String, String> props, int expectedStatus) throws IOException, Exception
protected Document createTextFile(String parentId, String fileName, String textContent, String encoding,
Map<String, String> props, int expectedStatus) throws Exception
{
if (props == null)
{
@@ -820,9 +826,11 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
.setProperties(props)
.build();
HttpResponse response = post(getNodeChildrenUrl(parentId), reqBody.getBody(), null, reqBody.getContentType(), expectedStatus);
HttpResponse response = post(getNodeChildrenUrl(parentId), reqBody.getBody(), null, reqBody.getContentType(),
expectedStatus);
if (response.getJsonResponse().get("error") != null)
if (response.getJsonResponse()
.get("error") != null)
{
return null;
}
@@ -835,7 +843,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return createEmptyTextFile(parentFolderId, docName, null, 201);
}
protected Document createEmptyTextFile(String parentFolderId, String docName, Map<String, String> params, int expectedStatus) throws Exception
protected Document createEmptyTextFile(String parentFolderId, String docName, Map<String, String> params,
int expectedStatus) throws Exception
{
Document d1 = new Document();
d1.setName(docName);
@@ -845,7 +854,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
d1.setContent(ci);
// create empty file
HttpResponse response = post(getNodeChildrenUrl(parentFolderId), toJsonAsStringNonNull(d1), params, null, "alfresco", expectedStatus);
HttpResponse response = post(getNodeChildrenUrl(parentFolderId), toJsonAsStringNonNull(d1), params, null, "alfresco",
expectedStatus);
if (expectedStatus != 201)
{
return null;
@@ -858,7 +868,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return updateTextFile(contentId, textContent, params, 200);
}
protected Document updateTextFile(String contentId, String textContent, Map<String, String> params, int expectedStatus) throws Exception
protected Document updateTextFile(String contentId, String textContent, Map<String, String> params,
int expectedStatus) throws Exception
{
ByteArrayInputStream inputStream = new ByteArrayInputStream(textContent.getBytes());
File txtFile = TempFileProvider.createTempFile(inputStream, getClass().getSimpleName(), ".txt");
@@ -874,7 +885,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected File getResourceFile(String fileName) throws FileNotFoundException
{
URL url = NodeApiTest.class.getClassLoader().getResource(RESOURCE_PREFIX + fileName);
URL url = NodeApiTest.class.getClassLoader()
.getResource(RESOURCE_PREFIX + fileName);
if (url == null)
{
fail("Cannot get the resource: " + fileName);
@@ -907,14 +919,15 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
* @return
* @throws Exception
*/
protected String updateFileVersions(String userId, String contentNodeId, int cnt,
String textContentPrefix, int verCnt,
Boolean majorVersion, String currentVersionLabel) throws Exception
protected String updateFileVersions(String userId, String contentNodeId, int cnt, String textContentPrefix,
int verCnt, Boolean majorVersion, String currentVersionLabel) throws Exception
{
String[] parts = currentVersionLabel.split("\\.");
int majorVer = Integer.valueOf(parts[0]).intValue();
int minorVer = Integer.valueOf(parts[1]).intValue();
int majorVer = Integer.valueOf(parts[0])
.intValue();
int minorVer = Integer.valueOf(parts[1])
.intValue();
Map<String, String> params = new HashMap<>();
params.put(Nodes.PARAM_OVERWRITE, "true");
@@ -928,7 +941,6 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
majorVersion = false;
}
if (majorVersion)
{
minorVer = 0;
@@ -961,30 +973,33 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
HttpResponse response = putBinary(getNodeContentUrl(contentNodeId), payload, null, params, 200);
Node nodeResp = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), Node.class);
assertTrue(nodeResp.getAspectNames().contains("cm:versionable"));
assertTrue(nodeResp.getAspectNames()
.contains("cm:versionable"));
assertNotNull(nodeResp.getProperties());
assertEquals(currentVersionLabel, nodeResp.getProperties().get("cm:versionLabel"));
assertEquals((majorVersion ? "MAJOR" : "MINOR"), nodeResp.getProperties().get("cm:versionType"));
assertEquals(currentVersionLabel, nodeResp.getProperties()
.get("cm:versionLabel"));
assertEquals((majorVersion ? "MAJOR" : "MINOR"), nodeResp.getProperties()
.get("cm:versionType"));
// double-check - get version node info
response = getSingle(getNodeVersionsUrl(contentNodeId), currentVersionLabel, null, 200);
nodeResp = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), Node.class);
assertEquals(currentVersionLabel, nodeResp.getProperties().get("cm:versionLabel"));
assertEquals((majorVersion ? "MAJOR" : "MINOR"), nodeResp.getProperties().get("cm:versionType"));
assertEquals(currentVersionLabel, nodeResp.getProperties()
.get("cm:versionLabel"));
assertEquals((majorVersion ? "MAJOR" : "MINOR"), nodeResp.getProperties()
.get("cm:versionType"));
}
return currentVersionLabel;
}
protected static final long PAUSE_TIME = 5000; //millisecond
protected static final int MAX_RETRY = 20;
protected Rendition waitAndGetRendition(String sourceNodeId, String versionId, String renditionId) throws Exception
{
return waitAndGetRendition(sourceNodeId, versionId, renditionId, MAX_RETRY, PAUSE_TIME);
}
protected Rendition waitAndGetRendition(String sourceNodeId, String versionId, String renditionId, int maxRetry, long pauseTime) throws Exception
protected Rendition waitAndGetRendition(String sourceNodeId, String versionId, String renditionId, int maxRetry,
long pauseTime) throws Exception
{
int retryCount = 0;
while (retryCount < maxRetry)
@@ -992,7 +1007,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
try
{
HttpResponse response;
if ((versionId != null) && (! versionId.isEmpty()))
if ((versionId != null) && (!versionId.isEmpty()))
{
response = getSingle(getNodeVersionRenditionsUrl(sourceNodeId, versionId), renditionId, 200);
}
@@ -1011,7 +1026,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
// wait for 'PAUSE_TIME' and try again.
retryCount++;
System.out.println("waitAndGetRendition: "+retryCount);
System.out.println("waitAndGetRendition: " + retryCount);
Thread.sleep(pauseTime);
}
}
@@ -1024,7 +1039,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
return createAndGetRendition(sourceNodeId, null, renditionId);
}
protected Rendition createAndGetRendition(String sourceNodeId, String versionId, String renditionId) throws Exception
protected Rendition createAndGetRendition(String sourceNodeId, String versionId, String renditionId)
throws Exception
{
Rendition renditionRequest = new Rendition();
renditionRequest.setId(renditionId);
@@ -1035,9 +1051,10 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
try
{
HttpResponse response;
if ((versionId != null) && (! versionId.isEmpty()))
if ((versionId != null) && (!versionId.isEmpty()))
{
response = post(getNodeVersionRenditionsUrl(sourceNodeId, versionId), toJsonAsString(renditionRequest), 202);
response = post(getNodeVersionRenditionsUrl(sourceNodeId, versionId),
toJsonAsString(renditionRequest), 202);
}
else
{
@@ -1052,7 +1069,7 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
// wait for 'PAUSE_TIME' and try again.
retryCount++;
System.out.println("waitAndGetRendition: "+retryCount);
System.out.println("waitAndGetRendition: " + retryCount);
Thread.sleep(PAUSE_TIME);
}
}
@@ -1072,7 +1089,8 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
protected String getNodeVersionRenditionIdUrl(String nodeId, String versionId, String renditionID)
{
return URL_NODES + "/" + nodeId + "/" + URL_VERSIONS + "/" + versionId + "/" + URL_RENDITIONS + "/" + renditionID;
return URL_NODES + "/" + nodeId + "/" + URL_VERSIONS + "/" + versionId + "/" + URL_RENDITIONS + "/"
+ renditionID;
}
protected String getNodeVersionsUrl(String nodeId)
@@ -1120,5 +1138,14 @@ public abstract class AbstractBaseApiTest extends EnterpriseTestApi
RestApiDirectUrlConfig restDauConfig = (RestApiDirectUrlConfig) applicationContext.getBean("restApiDirectUrlConfig");
restDauConfig.setEnabled(false);
}
}
protected String generateNodeSizeDetailsUrl(String nodeId)
{
return URL_NODES + "/" + nodeId + "/" + URL_CALCULATEFOLDERSIZE;
}
protected String getNodeSizeDetailsUrl(String nodeId, String jobId)
{
return URL_NODES + "/" + nodeId + "/" + URL_CALCULATEFOLDERSIZE + "/" + jobId;
}
}
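The two URL helpers just added (generateNodeSizeDetailsUrl and getNodeSizeDetailsUrl) back the new folder-size endpoints. A minimal test-side sketch of how they might be exercised with the existing post/getSingle helpers follows; the folder ID, the 202/200 status codes and the "jobId" response field are assumptions for illustration, not taken from this change-set.

// Hypothetical sketch: folderId is an existing folder node in the test fixture.
HttpResponse postResponse = post(generateNodeSizeDetailsUrl(folderId), "{}", 202); // start the size calculation job (202 assumed)
String jobId = (String) ((JSONObject) postResponse.getJsonResponse().get("entry")).get("jobId"); // "jobId" field assumed
HttpResponse getResponse = getSingle(generateNodeSizeDetailsUrl(folderId), jobId, null, 200); // poll the job resource by its ID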

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.4.0.39</version>
<version>25.1.0.3</version>
</parent>
<dependencies>
@@ -850,12 +850,12 @@
</execution>
</executions>
<configuration>
<complianceLevel>1.8</complianceLevel>
<complianceLevel>17</complianceLevel>
<outxml>false</outxml>
<verbose>true</verbose>
<showWeaveInfo>true</showWeaveInfo>
<source>1.8</source>
<target>1.8</target>
<source>17</source>
<target>17</target>
<additionalCompilerArgs>
<arg>-parameters</arg>
</additionalCompilerArgs>

View File

@@ -28,30 +28,26 @@ package org.alfresco.repo.cache.lookup;
import java.io.Serializable;
import java.sql.Savepoint;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.ParameterCheck;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.domain.control.ControlDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.util.Pair;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.ParameterCheck;
/**
* A cache for two-way lookups of database entities. These are characterized by having a unique
* key (perhaps a database ID) and a separate unique key that identifies the object. If no cache
* is given, then all calls are passed through to the backing DAO.
* A cache for two-way lookups of database entities. These are characterized by having a unique key (perhaps a database ID) and a separate unique key that identifies the object. If no cache is given, then all calls are passed through to the backing DAO.
* <p>
* The keys must have good <code>equals</code> and <code>hashCode</code> implementations and
* must respect the case-sensitivity of the use-case.
* The keys must have good <code>equals</code> and <code>hashCode</code> implementations and must respect the case-sensitivity of the use-case.
* <p>
* All keys will be unique to the given cache region, allowing the cache to be shared
* between instances of this class.
* All keys will be unique to the given cache region, allowing the cache to be shared between instances of this class.
* <p>
* Generics:
* <ul>
* <li>K: The database unique identifier.</li>
* <li>V: The value stored against K.</li>
* <li>VK: The a value-derived key that will be used as a cache key when caching K for lookups by V.
* This can be the value itself if it is itself a good key.</li>
* <li>K: The database unique identifier.</li>
* <li>V: The value stored against K.</li>
* <li>VK: A value-derived key that will be used as a cache key when caching K for lookups by V. This can be the value itself if it is itself a good key.</li>
* </ul>
*
* @author Derek Hulley
@@ -65,105 +61,95 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
public static interface EntityLookupCallbackDAO<K1 extends Serializable, V1 extends Object, VK1 extends Serializable>
{
/**
* Resolve the given value into a unique value key that can be used to find the entity's ID.
* A return value should be small and efficient; don't return a value if this is not possible.
* Resolve the given value into a unique value key that can be used to find the entity's ID. A return value should be small and efficient; don't return a value if this is not possible.
* <p/>
* Implementations will often return the value itself, provided that the value is both
* serializable and has a good <code>equals</code> and <code>hashCode</code>.
* Implementations will often return the value itself, provided that the value is both serializable and has a good <code>equals</code> and <code>hashCode</code>.
* <p/>
* Were no adequate key can be generated for the value, then <tt>null</tt> can be returned.
* In this case, the {@link #findByValue(Object) findByValue} method might not even do a search
* and just return <tt>null</tt> itself i.e. if it is difficult to look the value up in storage
* then it is probably difficult to generate a cache key from it, too.. In this scenario, the
* cache will be purely for key-based lookups
* Where no adequate key can be generated for the value, then <tt>null</tt> can be returned. In this case, the {@link #findByValue(Object) findByValue} method might not even do a search and just return <tt>null</tt> itself, i.e. if it is difficult to look the value up in storage then it is probably difficult to generate a cache key from it, too. In this scenario, the cache will be purely for key-based lookups.
*
* @param value the full value being keyed (never <tt>null</tt>)
* @return Returns the business key representing the entity, or <tt>null</tt>
* if an economical key cannot be generated.
* @param value
* the full value being keyed (never <tt>null</tt>)
* @return Returns the business key representing the entity, or <tt>null</tt> if an economical key cannot be generated.
*/
VK1 getValueKey(V1 value);
/**
* Find an entity for a given key.
*
* @param key the key (ID) used to identify the entity (never <tt>null</tt>)
* @return Return the entity or <tt>null</tt> if no entity is exists for the ID
* @param key
* the key (ID) used to identify the entity (never <tt>null</tt>)
* @return Return the entity or <tt>null</tt> if no entity exists for the ID
*/
Pair<K1, V1> findByKey(K1 key);
/**
* Find and entity using the given value key. The <code>equals</code> and <code>hashCode</code>
* methods of the value object should respect case-sensitivity in the same way that this
* lookup treats case-sensitivity i.e. if the <code>equals</code> method is <b>case-sensitive</b>
* then this method should look the entity up using a <b>case-sensitive</b> search.
* Find an entity using the given value key. The <code>equals</code> and <code>hashCode</code> methods of the value object should respect case-sensitivity in the same way that this lookup treats case-sensitivity, i.e. if the <code>equals</code> method is <b>case-sensitive</b> then this method should look the entity up using a <b>case-sensitive</b> search.
* <p/>
* Since this is a cache backed by some sort of database, <tt>null</tt> values are allowed by the
* cache. The implementation of this method can throw an exception if <tt>null</tt> is not
* appropriate for the use-case.
* Since this is a cache backed by some sort of database, <tt>null</tt> values are allowed by the cache. The implementation of this method can throw an exception if <tt>null</tt> is not appropriate for the use-case.
* <p/>
* If the search is impossible or expensive, this method should just return <tt>null</tt>. This
* would usually be the case if the {@link #getValueKey(Object) getValueKey} method also returned
* <tt>null</tt> i.e. if it is difficult to look the value up in storage then it is probably
* difficult to generate a cache key from it, too.
* If the search is impossible or expensive, this method should just return <tt>null</tt>. This would usually be the case if the {@link #getValueKey(Object) getValueKey} method also returned <tt>null</tt> i.e. if it is difficult to look the value up in storage then it is probably difficult to generate a cache key from it, too.
*
* @param value the value (business object) used to identify the entity (<tt>null</tt> allowed).
* @return Return the entity or <tt>null</tt> if no entity matches the given value
* @param value
* the value (business object) used to identify the entity (<tt>null</tt> allowed).
* @return Return the entity or <tt>null</tt> if no entity matches the given value
*/
Pair<K1, V1> findByValue(V1 value);
/**
* Create an entity using the given values. It is valid to assume that the entity does not exist
* within the current transaction at least.
* Create an entity using the given values. It is valid to assume that the entity does not exist within the current transaction at least.
* <p/>
* Since persistence mechanisms often allow <tt>null</tt> values, these can be expected here. The
* implementation must throw an exception if <tt>null</tt> is not allowed for the specific use-case.
* Since persistence mechanisms often allow <tt>null</tt> values, these can be expected here. The implementation must throw an exception if <tt>null</tt> is not allowed for the specific use-case.
*
* @param value the value (business object) used to identify the entity (<tt>null</tt> allowed).
* @return Return the newly-created entity ID-value pair
* @param value
* the value (business object) used to identify the entity (<tt>null</tt> allowed).
* @return Return the newly-created entity ID-value pair
*/
Pair<K1, V1> createValue(V1 value);
/**
* Update the entity identified by the given key.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not.
*
* @param key the existing key (ID) used to identify the entity (never <tt>null</tt>)
* @param value the new value
* @return Returns the row update count.
* @throws UnsupportedOperationException if entity updates are not supported
* @param key
* the existing key (ID) used to identify the entity (never <tt>null</tt>)
* @param value
* the new value
* @return Returns the row update count.
* @throws UnsupportedOperationException
* if entity updates are not supported
*/
int updateValue(K1 key, V1 value);
/**
* Delete an entity for the given key.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not.
*
* @param key the key (ID) used to identify the entity (never <tt>null</tt>)
* @return Returns the row deletion count.
* @throws UnsupportedOperationException if entity deletion is not supported
* @param key
* the key (ID) used to identify the entity (never <tt>null</tt>)
* @return Returns the row deletion count.
* @throws UnsupportedOperationException
* if entity deletion is not supported
*/
int deleteByKey(K1 key);
/**
* Delete an entity for the given value.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not.
*
* @param value the value (business object) used to identify the enitity (<tt>null</tt> allowed)
* @return Returns the row deletion count.
* @throws UnsupportedOperationException if entity deletion is not supported
* @param value
* the value (business object) used to identify the entity (<tt>null</tt> allowed)
* @return Returns the row deletion count.
* @throws UnsupportedOperationException
* if entity deletion is not supported
*/
int deleteByValue(V1 value);
}
/**
* Adaptor for implementations that support immutable entities. The update and delete operations
* throw {@link UnsupportedOperationException}.
* Adaptor for implementations that support immutable entities. The update and delete operations throw {@link UnsupportedOperationException}.
*
* @author Derek Hulley
* @since 3.2
@@ -174,7 +160,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* This implementation never finds a value and is backed by {@link #getValueKey(Object)} returning nothing.
*
* @return Returns <tt>null</tt> always
* @return Returns <tt>null</tt> always
*/
public Pair<K2, V2> findByValue(V2 value)
{
@@ -184,7 +170,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* This implementation does not find by value and is backed by {@link #findByValue(Object)} returning nothing.
*
* @return Returns <tt>null</tt> always
* @return Returns <tt>null</tt> always
*/
public VK2 getValueKey(V2 value)
{
@@ -194,7 +180,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Disallows the operation.
*
* @throws UnsupportedOperationException always
* @throws UnsupportedOperationException
* always
*/
public int updateValue(K2 key, V2 value)
{
@@ -204,7 +191,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Disallows the operation.
*
* @throws UnsupportedOperationException always
* @throws UnsupportedOperationException
* always
*/
public int deleteByKey(K2 key)
{
@@ -214,7 +202,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Disallows the operation.
*
* @throws UnsupportedOperationException always
* @throws UnsupportedOperationException
* always
*/
public int deleteByValue(V2 value)
{
@@ -240,10 +229,10 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
private final String cacheRegion;
/**
* Construct the lookup cache <b>without any cache</b>. All calls are passed directly to the
* underlying DAO entity lookup.
* Construct the lookup cache <b>without any cache</b>. All calls are passed directly to the underlying DAO entity lookup.
*
* @param entityLookup the instance that is able to find and persist entities
* @param entityLookup
* the instance that is able to find and persist entities
*/
public EntityLookupCache(EntityLookupCallbackDAO<K, V, VK> entityLookup)
{
@@ -253,8 +242,10 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Construct the lookup cache, using the {@link #CACHE_REGION_DEFAULT default cache region}.
*
* @param cache the cache that will back the two-way lookups
* @param entityLookup the instance that is able to find and persist entities
* @param cache
* the cache that will back the two-way lookups
* @param entityLookup
* the instance that is able to find and persist entities
*/
@SuppressWarnings("rawtypes")
public EntityLookupCache(SimpleCache cache, EntityLookupCallbackDAO<K, V, VK> entityLookup)
@@ -265,15 +256,16 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Construct the lookup cache, using the given cache region.
* <p>
* All keys will be unique to the given cache region, allowing the cache to be shared
* between instances of this class.
* All keys will be unique to the given cache region, allowing the cache to be shared between instances of this class.
*
* @param cache the cache that will back the two-way lookups; <tt>null</tt> to have no backing
* in a cache.
* @param cacheRegion the region within the cache to use.
* @param entityLookup the instance that is able to find and persist entities
* @param cache
* the cache that will back the two-way lookups; <tt>null</tt> to have no backing in a cache.
* @param cacheRegion
* the region within the cache to use.
* @param entityLookup
* the instance that is able to find and persist entities
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
@SuppressWarnings({"rawtypes", "unchecked"})
public EntityLookupCache(SimpleCache cache, String cacheRegion, EntityLookupCallbackDAO<K, V, VK> entityLookup)
{
ParameterCheck.mandatory("cacheRegion", cacheRegion);
@@ -284,15 +276,13 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Find the entity associated with the given key.
* The {@link EntityLookupCallbackDAO#findByKey(Serializable) entity callback} will be used if necessary.
* Find the entity associated with the given key. The {@link EntityLookupCallbackDAO#findByKey(Serializable) entity callback} will be used if necessary.
* <p/>
* It is up to the client code to decide if a <tt>null</tt> return value indicates a concurrency violation
* or not; the former would normally result in a concurrency-related exception such as
* {@link ConcurrencyFailureException}.
* It is up to the client code to decide if a <tt>null</tt> return value indicates a concurrency violation or not; the former would normally result in a concurrency-related exception such as {@link ConcurrencyFailureException}.
*
* @param key The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns the key-value pair or <tt>null</tt> if the key doesn't reference an entity
* @param key
* The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns the key-value pair or <tt>null</tt> if the key doesn't reference an entity
*/
@SuppressWarnings("unchecked")
public Pair<K, V> getByKey(K key)
@@ -337,7 +327,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
{
value = entityPair.getSecond();
// Get the value key
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey != null)
{
@@ -354,15 +344,13 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Find the entity associated with the given value.
* The {@link EntityLookupCallbackDAO#findByValue(Object) entity callback} will be used if no entry exists in the cache.
* Find the entity associated with the given value. The {@link EntityLookupCallbackDAO#findByValue(Object) entity callback} will be used if no entry exists in the cache.
* <p/>
* It is up to the client code to decide if a <tt>null</tt> return value indicates a concurrency violation
* or not; the former would normally result in a concurrency-related exception such as
* {@link ConcurrencyFailureException}.
* It is up to the client code to decide if a <tt>null</tt> return value indicates a concurrency violation or not; the former would normally result in a concurrency-related exception such as {@link ConcurrencyFailureException}.
*
* @param value The entity value, which may be valid or invalid (<tt>null</tt> is allowed)
* @return Returns the key-value pair or <tt>null</tt> if the value doesn't reference an entity
* @param value
* The entity value, which may be valid or invalid (<tt>null</tt> is allowed)
* @return Returns the key-value pair or <tt>null</tt> if the value doesn't reference an entity
*/
@SuppressWarnings("unchecked")
public Pair<K, V> getByValue(V value)
@@ -376,7 +364,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Get the value key.
// The cast to (VK) is counter-intuitive, but works because they're all just Serializable
// It's nasty, but hidden from the cache client code.
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey == null)
{
@@ -423,16 +411,15 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Attempt to create the entity and, failing that, look it up.<br/>
* This method takes the opposite approach to {@link #getOrCreateByValue(Object)}, which assumes the entity's
* existence: in this case the entity is assumed to NOT exist.
* The {@link EntityLookupCallbackDAO#createValue(Object)} and {@link EntityLookupCallbackDAO#findByValue(Object)}
* will be used if necessary.<br/>
* This method takes the opposite approach to {@link #getOrCreateByValue(Object)}, which assumes the entity's existence: in this case the entity is assumed to NOT exist. The {@link EntityLookupCallbackDAO#createValue(Object)} and {@link EntityLookupCallbackDAO#findByValue(Object)} will be used if necessary.<br/>
* <p/>
* Use this method when the data involved is seldom reused.
*
* @param value The entity value (<tt>null</tt> is allowed)
* @param controlDAO an essential DAO required in order to ensure a transactionally-safe attempt at data creation
* @return Returns the key-value pair (new or existing and never <tt>null</tt>)
* @param value
* The entity value (<tt>null</tt> is allowed)
* @param controlDAO
* an essential DAO required in order to ensure a transactionally-safe attempt at data creation
* @return Returns the key-value pair (new or existing and never <tt>null</tt>)
*/
public Pair<K, V> createOrGetByValue(V value, ControlDAO controlDAO)
{
@@ -448,6 +435,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Cache it
if (cache != null)
{
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
cache.put(new CacheRegionValueKey(cacheRegion, valueKey), entityPair.getFirst());
cache.put(
new CacheRegionKey(cacheRegion, entityPair.getFirst()),
(entityPair.getSecond() == null ? VALUE_NULL : entityPair.getSecond()));
@@ -464,12 +453,11 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Find the entity associated with the given value and create it if it doesn't exist.
* The {@link EntityLookupCallbackDAO#findByValue(Object)} and {@link EntityLookupCallbackDAO#createValue(Object)}
* will be used if necessary.
* Find the entity associated with the given value and create it if it doesn't exist. The {@link EntityLookupCallbackDAO#findByValue(Object)} and {@link EntityLookupCallbackDAO#createValue(Object)} will be used if necessary.
*
* @param value The entity value (<tt>null</tt> is allowed)
* @return Returns the key-value pair (new or existing and never <tt>null</tt>)
* @param value
* The entity value (<tt>null</tt> is allowed)
* @return Returns the key-value pair (new or existing and never <tt>null</tt>)
*/
@SuppressWarnings("unchecked")
public Pair<K, V> getOrCreateByValue(V value)
@@ -488,7 +476,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
// Get the value key
// The cast to (VK) is counter-intuitive, but works because they're all just Serializable.
// It's nasty, but hidden from the cache client code.
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey == null)
{
@@ -530,17 +518,15 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Update the entity associated with the given key.
* The {@link EntityLookupCallbackDAO#updateValue(Serializable, Object)} callback
* will be used if necessary.
* Update the entity associated with the given key. The {@link EntityLookupCallbackDAO#updateValue(Serializable, Object)} callback will be used if necessary.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised
* by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
*
* @param key The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @param value The new entity value (may be <tt>null</tt>)
* @return Returns the row update count.
* @param key
* The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @param value
* The new entity value (may be <tt>null</tt>)
* @return Returns the row update count.
*/
@SuppressWarnings("unchecked")
public int updateValue(K key, V value)
@@ -564,7 +550,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
// Get the value key.
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey != null)
{
@@ -580,11 +566,45 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
return updateCount;
}
/**
* Find the entity associated with the given value if it is cached.
*
* @param value
* The entity value (<tt>null</tt> is not allowed)
* @return Returns the key-value pair (existing or <tt>null</tt>)
*/
@SuppressWarnings("unchecked")
public Pair<K, V> getCachedEntityByValue(V value)
{
if (cache == null || value == null)
{
return null;
}
VK valueKey = entityLookup.getValueKey(value);
if (valueKey == null)
{
return null;
}
// Retrieve the cached value
CacheRegionValueKey valueCacheKey = new CacheRegionValueKey(cacheRegion, valueKey);
K key = (K) cache.get(valueCacheKey);
if (key != null && !key.equals(VALUE_NOT_FOUND))
{
return getByKey(key);
}
return null;
}
/**
* Cache-only operation: Get the key for a given value key (note: not 'value' but 'value key').
*
* @param valueKey The entity value key, which must be valid (<tt>null</tt> not allowed)
* @return The entity key (may be <tt>null</tt>)
* @param valueKey
* The entity value key, which must be valid (<tt>null</tt> not allowed)
* @return The entity key (may be <tt>null</tt>)
*/
@SuppressWarnings("unchecked")
public K getKey(VK valueKey)
@@ -603,8 +623,9 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Cache-only operation: Get the value for a given key
*
* @param key The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return The entity value (may be <tt>null</tt>)
* @param key
* The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return The entity value (may be <tt>null</tt>)
*/
@SuppressWarnings("unchecked")
public V getValue(K key)
@@ -634,8 +655,10 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Cache-only operation: Update the cache's value
*
* @param key The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @param value The new entity value (may be <tt>null</tt>)
* @param key
* The entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @param value
* The new entity value (may be <tt>null</tt>)
*/
@SuppressWarnings("unchecked")
public void setValue(K key, V value)
@@ -651,7 +674,7 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
removeByKey(key, false);
// Get the value key.
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
// Check if the value has a good key
if (valueKey != null)
{
@@ -667,15 +690,13 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Delete the entity associated with the given key.
* The {@link EntityLookupCallbackDAO#deleteByKey(Serializable)} callback will be used if necessary.
* Delete the entity associated with the given key. The {@link EntityLookupCallbackDAO#deleteByKey(Serializable)} callback will be used if necessary.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised
* by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
*
* @param key the entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns the row deletion count
* @param key
* the entity key, which may be valid or invalid (<tt>null</tt> not allowed)
* @return Returns the row deletion count
*/
public int deleteByKey(K key)
{
@@ -693,15 +714,13 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
/**
* Delete the entity having the given value..
* The {@link EntityLookupCallbackDAO#deleteByValue(Object)} callback will be used if necessary.
* Delete the entity having the given value. The {@link EntityLookupCallbackDAO#deleteByValue(Object)} callback will be used if necessary.
* <p/>
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation
* or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised
* by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
* It is up to the client code to decide if a <tt>0</tt> return value indicates a concurrency violation or not; usually the former will generate {@link ConcurrencyFailureException} or something recognised by the {@link RetryingTransactionHelper#RETRY_EXCEPTIONS RetryingTransactionHelper}.
*
* @param value the entity value, which may be valid or invalid (<tt>null</tt> allowed)
* @return Returns the row deletion count
* @param value
* the entity value, which may be valid or invalid (<tt>null</tt> allowed)
* @return Returns the row deletion count
*/
public int deleteByValue(V value)
{
@@ -735,7 +754,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Cache-only operation: Remove all cache values associated with the given key.
*
* @param removeKey <tt>true</tt> to remove the given key's entry
* @param removeKey
* <tt>true</tt> to remove the given key's entry
*/
@SuppressWarnings("unchecked")
private void removeByKey(K key, boolean removeKey)
@@ -761,7 +781,8 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
/**
* Cache-only operation: Remove all cache values associated with the given value
*
* @param value The entity value (<tt>null</tt> is allowed)
* @param value
* The entity value (<tt>null</tt> is allowed)
*/
@SuppressWarnings("unchecked")
public void removeByValue(V value)
@@ -773,10 +794,10 @@ public class EntityLookupCache<K extends Serializable, V extends Object, VK exte
}
// Get the value key
VK valueKey = (value == null) ? (VK)VALUE_NULL : entityLookup.getValueKey(value);
VK valueKey = (value == null) ? (VK) VALUE_NULL : entityLookup.getValueKey(value);
if (valueKey == null)
{
// No key generated for the value. There is nothing that can be done.
// No key generated for the value. There is nothing that can be done.
return;
}
// Look in the cache
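For orientation, a minimal usage sketch of EntityLookupCache is shown below; the cache instance, cache region and DAO calls are hypothetical and are not part of this change-set.

// Sketch only: "cache" is some SimpleCache, and dao.selectById/selectByValue/insert are hypothetical callbacks.
EntityLookupCache<Long, String, String> lookup = new EntityLookupCache<>(cache, "hypothetical.region",
        new EntityLookupCallbackDAOAdaptor<Long, String, String>()
        {
            @Override
            public String getValueKey(String value)
            {
                return value; // the value is serializable and acts as its own key here
            }

            @Override
            public Pair<Long, String> findByKey(Long key)
            {
                return dao.selectById(key); // hypothetical lookup by database ID
            }

            @Override
            public Pair<Long, String> findByValue(String value)
            {
                return dao.selectByValue(value); // hypothetical lookup by value
            }

            @Override
            public Pair<Long, String> createValue(String value)
            {
                return dao.insert(value); // hypothetical insert returning the new ID-value pair
            }
        });
Pair<Long, String> pair = lookup.getOrCreateByValue("some value"); // cache-backed find-or-create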

View File

@@ -31,6 +31,11 @@ import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor;
@@ -45,19 +50,13 @@ import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.Pair;
import org.alfresco.util.transaction.TransactionListenerAdapter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
/**
* Abstract implementation for ContentData DAO.
* <p>
* This provides basic services such as caching, but defers to the underlying implementation
* for CRUD operations.
* This provides basic services such as caching, but defers to the underlying implementation for CRUD operations.
* <p>
* The DAO deals in {@link ContentData} instances. The cache is primarily present to decode
* IDs into <code>ContentData</code> instances.
* The DAO deals in {@link ContentData} instances. The cache is primarily present to decode IDs into <code>ContentData</code> instances.
*
* @author Derek Hulley
* @author sglover
@@ -127,7 +126,8 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
/**
* Set this property to enable eager cleanup of orphaned content.
*
* @param contentStoreCleaner an eager cleaner (may be <tt>null</tt>)
* @param contentStoreCleaner
* an eager cleaner (may be <tt>null</tt>)
*/
public void setContentStoreCleaner(EagerContentStoreCleaner contentStoreCleaner)
{
@@ -135,7 +135,8 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
}
/**
* @param contentDataCache the cache of IDs to ContentData and vice versa
* @param contentDataCache
* the cache of IDs to ContentData and vice versa
*/
public void setContentDataCache(SimpleCache<Long, ContentData> contentDataCache)
{
@@ -154,8 +155,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
}
/**
* A <b>content_url</b> entity was dereferenced. This makes no assumptions about the
* current references - dereference deletion is handled in the commit phase.
* A <b>content_url</b> entity was dereferenced. This makes no assumptions about the current references - dereference deletion is handled in the commit phase.
*/
protected void registerDereferencedContentUrl(String contentUrl)
{
@@ -205,7 +205,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
}
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getByValue(contentUrl);
if(pair != null)
if (pair != null)
{
result = contentUrlCache.updateValue(pair.getFirst(), contentUrl);
}
@@ -325,7 +325,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
ContentDataEntity contentDataEntity = getContentDataEntity(key);
if (contentDataEntity == null)
{
return 0; // The client (outer-level code) will decide if this is an error
return 0; // The client (outer-level code) will decide if this is an error
}
return updateContentDataEntity(contentDataEntity, value);
}
@@ -343,7 +343,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
private class ContentUrlCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentUrlEntity, String>
{
/**
* @return Returns the Node's NodeRef
* @return Returns the Node's NodeRef
*/
@Override
public String getValueKey(ContentUrlEntity value)
@@ -393,7 +393,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
ContentUrlEntity contentUrlEntity = getContentUrlEntity(id);
if (contentUrlEntity == null)
{
return 0; // The client (outer-level code) will decide if this is an error
return 0; // The client (outer-level code) will decide if this is an error
}
return updateContentUrlEntity(contentUrlEntity, value);
}
@@ -413,7 +413,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
// Decode content URL
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
if(contentUrlId != null)
if (contentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(contentUrlId);
if (entityPair == null)
@@ -470,7 +470,15 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl);
contentUrlEntity.setSize(size);
Pair<Long, ContentUrlEntity> pair = contentUrlCache.createOrGetByValue(contentUrlEntity, controlDAO);
// Attempt to get the data from cache
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getCachedEntityByValue(contentUrlEntity);
if (pair == null)
{
pair = contentUrlCache.createOrGetByValue(contentUrlEntity, controlDAO);
}
contentUrlId = pair.getFirst();
}
@@ -510,7 +518,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
// Resolve the content URL
Long oldContentUrlId = contentDataEntity.getContentUrlId();
ContentUrlEntity contentUrlEntity = null;
if(oldContentUrlId != null)
if (oldContentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(oldContentUrlId);
if (entityPair == null)
@@ -526,12 +534,12 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
if (oldContentUrl != null)
{
// We have a changed value. The old content URL has been dereferenced.
// We have a changed value. The old content URL has been dereferenced.
registerDereferencedContentUrl(oldContentUrl);
}
if (newContentUrl != null)
{
if(contentUrlEntity == null)
if (contentUrlEntity == null)
{
contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(newContentUrl);
@@ -595,7 +603,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
boolean success = true;
ContentUrlEntity existing = getContentUrl(contentUrlId);
if(existing != null)
if (existing != null)
{
ContentUrlEntity entity = ContentUrlEntity.setContentUrlKey(existing, contentUrlKey);
updateContentUrl(entity);
@@ -638,33 +646,37 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
}
/**
* @param contentUrl the content URL to create or search for
* @param contentUrl
* the content URL to create or search for
*/
protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size, ContentUrlKeyEntity contentUrlKey);
/**
* @param id the ID of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist
* @param id
* the ID of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentUrlEntity getContentUrlEntity(Long id);
protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);
/**
* @param contentUrl the URL of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist or is still
* referenced by a <b>content_data</b> entity
* @param contentUrl
* the URL of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist or is still referenced by a <b>content_data</b> entity
*/
protected abstract ContentUrlEntity getContentUrlEntityUnreferenced(String contentUrl);
/**
* Update a content URL with the given orphan time
*
* @param id the unique ID of the entity
* @param orphanTime the time (ms since epoch) that the entity was orphaned
* @param oldOrphanTime the orphan time we expect to update for optimistic locking (may be <tt>null</tt>)
* @return Returns the number of rows updated
* @param id
* the unique ID of the entity
* @param orphanTime
* the time (ms since epoch) that the entity was orphaned
* @param oldOrphanTime
* the orphan time we expect to update for optimistic locking (may be <tt>null</tt>)
* @return Returns the number of rows updated
*/
protected abstract int updateContentUrlOrphanTime(Long id, Long orphanTime, Long oldOrphanTime);
@@ -678,33 +690,37 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
Long localeId);
/**
* @param id the entity ID
* @return Returns the entity or <tt>null</tt> if it doesn't exist
* @param id
* the entity ID
* @return Returns the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentDataEntity getContentDataEntity(Long id);
/**
* @param nodeIds the node ID
* @return Returns the associated entities or <tt>null</tt> if none exist
* @param nodeIds
* the node ID
* @return Returns the associated entities or <tt>null</tt> if none exist
*/
protected abstract List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds);
/**
* Update an existing <b>alf_content_data</b> entity
*
* @param entity the existing entity that will be updated
* @return Returns the number of rows updated (should be 1)
* @param entity
* the existing entity that will be updated
* @return Returns the number of rows updated (should be 1)
*/
protected abstract int updateContentDataEntity(ContentDataEntity entity);
/**
* Delete the entity with the given ID
*
* @return Returns the number of rows deleted
* @return Returns the number of rows deleted
*/
protected abstract int deleteContentDataEntity(Long id);
protected abstract int deleteContentUrlEntity(long id);
protected abstract int updateContentUrlEntity(ContentUrlEntity existing, ContentUrlEntity entity);
/**
@@ -738,7 +754,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
// We mark the URL as orphaned.
// The content binary is not scheduled for immediate removal so just mark the
// row's orphan time. Concurrently, it is possible for multiple references
// row's orphan time. Concurrently, it is possible for multiple references
// to be made WHILE the orphan time is set, but we handle that separately.
Long contentUrlId = contentUrlEntity.getId();
Long oldOrphanTime = contentUrlEntity.getOrphanTime();
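The functional change in this file is the cache-first lookup when a content URL entity is created: the DAO now asks the EntityLookupCache for an already-cached entry via getCachedEntityByValue before falling back to createOrGetByValue, which still hits the database on a miss. A minimal sketch of that pattern, using the field names from the hunks above; the helper method name resolveContentUrlId is hypothetical and only for illustration:

// Sketch only: cache-first resolution of a content URL entity inside AbstractContentDataDAOImpl.
private Long resolveContentUrlId(String contentUrl, long size)
{
    ContentUrlEntity probe = new ContentUrlEntity();
    probe.setContentUrl(contentUrl);
    probe.setSize(size);
    // New read-only lookup: returns null when the value is not already cached
    Pair<Long, ContentUrlEntity> pair = contentUrlCache.getCachedEntityByValue(probe);
    if (pair == null)
    {
        // Cache miss: create or fetch the row as before, which also populates the cache
        pair = contentUrlCache.createOrGetByValue(probe, controlDAO);
    }
    return pair.getFirst();
}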

View File

@@ -204,13 +204,11 @@ public class MessageServiceImpl implements MessageService
{
if (! tenantResourceBundleBaseNames.contains(resBundlePath))
{
tenantResourceBundleBaseNames.add(resBundlePath);
putResourceBundleBaseNames(tenantDomain, tenantResourceBundleBaseNames);
tenantResourceBundleBaseNames.add(resBundlePath);
reloadResourceBundles(tenantResourceBundleBaseNames);
}
logger.info("Registered message bundle '" + resBundlePath + "'");
clearLoadedResourceBundles(tenantDomain); // force re-load of message cache
}
finally
{
@@ -290,7 +288,8 @@ public class MessageServiceImpl implements MessageService
Set<String> resourceBundleBaseNamesForAllLocales;
String tenantDomain = getTenantDomain();
LockHelper.tryLock(readLock, tryLockTimeout, "getting loaded resource bundles, messages and base names in 'MessageServiceImpl.unregisterResourceBundle()'");
LockHelper.tryLock(readLock, tryLockTimeout,
"getting loaded resource bundles, messages and base names in 'MessageServiceImpl.unregisterResourceBundle()'");
try
{
// all locales
@@ -303,7 +302,8 @@ public class MessageServiceImpl implements MessageService
readLock.unlock();
}
LockHelper.tryLock(writeLock, tryLockTimeout, "removing resource bundle by path in 'MessageServiceImpl.unregisterResourceBundle()'");
LockHelper.tryLock(writeLock, tryLockTimeout,
"removing resource bundle by path in 'MessageServiceImpl.unregisterResourceBundle()'");
try
{
// unload resource bundles for each locale (by tenant, if applicable)
@@ -329,7 +329,7 @@ public class MessageServiceImpl implements MessageService
if (idx1 != -1)
{
// load from repository
int idx2 = resBundlePath.indexOf("/", idx1+3);
int idx2 = resBundlePath.indexOf("/", idx1 + 3);
String store = resBundlePath.substring(0, idx2);
String path = resBundlePath.substring(idx2);
@@ -342,7 +342,8 @@ public class MessageServiceImpl implements MessageService
}
catch (IOException ioe)
{
throw new AlfrescoRuntimeException("Failed to read message resource bundle from repository " + resBundlePath + " : " + ioe);
throw new AlfrescoRuntimeException("Failed to read message resource bundle from repository "
+ resBundlePath + " : " + ioe);
}
}
else
@@ -372,10 +373,9 @@ public class MessageServiceImpl implements MessageService
if (resourceBundleBaseNamesForAllLocales != null)
{
resourceBundleBaseNamesForAllLocales.remove(resBundlePath);
reloadResourceBundles(resourceBundleBaseNamesForAllLocales);
logger.info("Unregistered message bundle '" + resBundlePath + "'");
}
clearLoadedResourceBundles(tenantDomain); // force re-load of message cache
}
finally
{
@@ -383,6 +383,14 @@ public class MessageServiceImpl implements MessageService
}
}
private void reloadResourceBundles(Set<String> newResourceBundles)
{
logger.debug("Reloading message bundles ...");
String tenantDomain = getTenantDomain();
putResourceBundleBaseNames(tenantDomain, newResourceBundles);
clearLoadedResourceBundles(tenantDomain); // force re-load of message cache
}
/**
* Get the messages for a locale.
* <p>
@@ -478,7 +486,7 @@ public class MessageServiceImpl implements MessageService
if (idx1 != -1)
{
// load from repository
int idx2 = resBundlePath.indexOf("/", idx1+3);
int idx2 = resBundlePath.indexOf("/", idx1 + 3);
String store = resBundlePath.substring(0, idx2);
String path = resBundlePath.substring(idx2);
@@ -491,7 +499,8 @@ public class MessageServiceImpl implements MessageService
}
catch (IOException ioe)
{
throw new AlfrescoRuntimeException("Failed to read message resource bundle from repository " + resBundlePath + " : " + ioe);
throw new AlfrescoRuntimeException(
"Failed to read message resource bundle from repository " + resBundlePath + " : " + ioe);
}
}
else

View File

@@ -0,0 +1,41 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.node.sizedetails;
import java.util.Optional;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails;
import org.alfresco.service.cmr.repository.NodeRef;
public interface NodeSizeDetailsService
{
void invokeSizeDetailsExecutor(NodeRef nodeRef, String jobId);
void putSizeDetails(String id, NodeSizeDetails nodeSizeDetails);
Optional<NodeSizeDetails> getSizeDetails(String id);
}
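A hedged usage sketch of this interface: a caller starts the asynchronous size calculation for a folder and later reads the cache-backed result by node ID. The variable names and the surrounding code are illustrative only and not part of this change:

// Illustrative caller, assuming nodeSizeDetailsService and folderNodeRef are already available.
String jobId = java.util.UUID.randomUUID().toString();
nodeSizeDetailsService.invokeSizeDetailsExecutor(folderNodeRef, jobId);

// The calculation runs on a background thread pool; a real caller would poll by node ID
// until the status is COMPLETED or FAILED.
Optional<NodeSizeDetails> details = nodeSizeDetailsService.getSizeDetails(folderNodeRef.getId());
details.ifPresent(d -> System.out.println("status=" + d.getStatus() + ", bytes=" + d.getSizeInBytes()));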

View File

@@ -0,0 +1,400 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.node.sizedetails;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ThreadPoolExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl.NodeSizeDetails.STATUS;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchParameters.FieldFacet;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair;
import org.alfresco.util.ParameterCheck;
/**
 * NodeSizeDetailsServiceImpl executes an Alfresco FTS query to find the size details of a folder node.
*/
public class NodeSizeDetailsServiceImpl implements NodeSizeDetailsService, InitializingBean
{
private static final Logger LOG = LoggerFactory.getLogger(NodeSizeDetailsServiceImpl.class);
private static final String FIELD_FACET = "content.size";
private static final String FACET_QUERY = "{!afts}content.size:[0 TO " + Integer.MAX_VALUE + "]";
private SearchService searchService;
private SimpleCache<Serializable, NodeSizeDetails> simpleCache;
private TransactionService transactionService;
private ThreadPoolExecutor threadPoolExecutor;
private int defaultItems;
public void setSearchService(SearchService searchService)
{
this.searchService = searchService;
}
@Override
public Optional<NodeSizeDetails> getSizeDetails(String id)
{
NodeSizeDetails details = simpleCache.get(id);
return Optional.ofNullable(details)
.or(() -> {
LOG.error("No Size details found for ID: " + id);
return Optional.empty();
});
}
public void setSimpleCache(SimpleCache<Serializable, NodeSizeDetails> simpleCache)
{
this.simpleCache = simpleCache;
}
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
public void setThreadPoolExecutor(ThreadPoolExecutor threadPoolExecutor)
{
this.threadPoolExecutor = threadPoolExecutor;
}
public void setDefaultItems(int defaultItems)
{
this.defaultItems = defaultItems;
}
@Override
public void invokeSizeDetailsExecutor(NodeRef nodeRef, String jobId)
{
try
{
executeSizeCalculation(nodeRef, jobId);
}
catch (Exception e)
{
LOG.error("Exception occurred while executing invokeSizeDetailsExecutor method ", e);
}
}
@Override
public void putSizeDetails(String id, NodeSizeDetails nodeSizeDetails)
{
simpleCache.put(id, nodeSizeDetails);
}
private void executeSizeCalculation(NodeRef nodeRef, String jobId)
{
String authenticatedUserName = AuthenticationUtil.getFullyAuthenticatedUser();
RetryingTransactionCallback<NodeSizeDetails> executionCallback = () -> {
try
{
return calculateTotalSizeFromFacet(nodeRef, jobId);
}
catch (Exception ex)
{
LOG.error("Exception occurred in executeSizeCalculation:RetryingTransactionCallback ", ex);
throw ex;
}
};
threadPoolExecutor.execute(() -> {
NodeSizeDetails nodeSizeDetails = new NodeSizeDetails(nodeRef.getId(), jobId, STATUS.IN_PROGRESS);
putSizeDetails(nodeRef.getId(), nodeSizeDetails);
try
{
nodeSizeDetails = AuthenticationUtil.runAs(() -> transactionService.getRetryingTransactionHelper()
.doInTransaction(executionCallback, true), authenticatedUserName);
}
catch (Exception e)
{
LOG.error("Exception occurred in executeSizeCalculation", e);
nodeSizeDetails = new NodeSizeDetails(nodeRef.getId(), 0L, jobId, STATUS.FAILED);
}
finally
{
putSizeDetails(nodeRef.getId(), nodeSizeDetails);
}
});
}
private NodeSizeDetails calculateTotalSizeFromFacet(NodeRef nodeRef, String jobId)
{
long totalSizeFromFacet = 0;
int skipCount = 0;
int totalItems = defaultItems;
boolean isCalculationCompleted = false;
try
{
ResultSet results = facetQuery(nodeRef);
int resultsSize = results.getFieldFacet(FIELD_FACET)
.size();
while (!isCalculationCompleted)
{
List<Pair<String, Integer>> facetPairs = results.getFieldFacet(FIELD_FACET)
.subList(skipCount, Math.min(totalItems, resultsSize));
totalSizeFromFacet += facetPairs.parallelStream()
.mapToLong(pair -> Long.parseLong(pair.getFirst()) * pair.getSecond())
.sum();
if (resultsSize <= totalItems || resultsSize <= defaultItems)
{
isCalculationCompleted = true;
}
else
{
skipCount += defaultItems;
resultsSize -= totalItems;
totalItems += Math.min(resultsSize, defaultItems);
}
}
Date calculationDate = new Date(System.currentTimeMillis());
NodeSizeDetails nodeSizeDetails = new NodeSizeDetails(nodeRef.getId(), totalSizeFromFacet, calculationDate,
results.getNodeRefs()
.size(),
STATUS.COMPLETED, jobId);
return nodeSizeDetails;
}
catch (Exception e)
{
LOG.error("Exception occurred while calculating total size from facet", e);
throw e;
}
}
private ResultSet facetQuery(NodeRef nodeRef)
{
try
{
SearchParameters searchParameters = createSearchParameters(nodeRef);
ResultSet resultsWithoutFacet = searchService.query(searchParameters);
if (LOG.isDebugEnabled())
{
LOG.debug(" After Executing facet query, no. of records found " + resultsWithoutFacet.getNumberFound());
}
searchParameters.addFacetQuery(FACET_QUERY);
FieldFacet fieldFacet = new FieldFacet(FIELD_FACET);
fieldFacet.setLimitOrNull((int) resultsWithoutFacet.getNumberFound());
searchParameters.addFieldFacet(fieldFacet);
resultsWithoutFacet.close();
return searchService.query(searchParameters);
}
catch (Exception e)
{
LOG.error("Exception occurred while executing facetQuery ", e);
throw e;
}
}
private SearchParameters createSearchParameters(NodeRef nodeRef)
{
SearchParameters searchParameters = new SearchParameters();
searchParameters.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
searchParameters.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
searchParameters.setQuery("ANCESTOR:\"" + nodeRef + "\" AND TYPE:\"cm:content\"");
searchParameters.setTrackTotalHits(-1);
return searchParameters;
}
@Override
public void afterPropertiesSet() throws Exception
{
ParameterCheck.mandatory("searchService", this.searchService);
ParameterCheck.mandatory("simpleCache", this.simpleCache);
ParameterCheck.mandatory("transactionService", this.transactionService);
ParameterCheck.mandatory("threadPoolExecutor", this.threadPoolExecutor);
}
/**
* POJO class to hold node size details.
*/
public static class NodeSizeDetails implements Serializable
{
private static final long serialVersionUID = 1L;
private String id;
private Long sizeInBytes;
private Date calculatedAt;
private Integer numberOfFiles;
private String jobId;
private STATUS status;
public NodeSizeDetails()
{}
public NodeSizeDetails(String jobId)
{
this.jobId = jobId;
}
public NodeSizeDetails(String id, STATUS status)
{
this.id = id;
this.status = status;
}
public NodeSizeDetails(String id, String jobId, STATUS status)
{
this.id = id;
this.jobId = jobId;
this.status = status;
}
public NodeSizeDetails(String id, Long sizeInBytes, String jobId, STATUS status)
{
this.id = id;
this.sizeInBytes = sizeInBytes;
this.jobId = jobId;
this.status = status;
}
public NodeSizeDetails(String id, Long sizeInBytes, Date calculatedAt, Integer numberOfFiles,
STATUS currentStatus, String jobId)
{
this.id = id;
this.sizeInBytes = sizeInBytes;
this.calculatedAt = calculatedAt;
this.numberOfFiles = numberOfFiles;
this.status = currentStatus;
this.jobId = jobId;
}
public String getId()
{
return id;
}
public void setId(String id)
{
this.id = id;
}
public Long getSizeInBytes()
{
return sizeInBytes;
}
public void setSizeInBytes(Long sizeInBytes)
{
this.sizeInBytes = sizeInBytes;
}
public Date getCalculatedAt()
{
return calculatedAt;
}
public void setCalculatedAt(Date calculatedAt)
{
this.calculatedAt = calculatedAt;
}
public Integer getNumberOfFiles()
{
return numberOfFiles;
}
public void setNumberOfFiles(Integer numberOfFiles)
{
this.numberOfFiles = numberOfFiles;
}
public String getJobId()
{
return jobId;
}
public void setJobId(String jobId)
{
this.jobId = jobId;
}
public STATUS getStatus()
{
return status;
}
public void setStatus(STATUS status)
{
this.status = status;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
NodeSizeDetails that = (NodeSizeDetails) o;
return Objects.equals(id, that.id) && Objects.equals(sizeInBytes, that.sizeInBytes) && Objects.equals(
calculatedAt, that.calculatedAt) && Objects.equals(numberOfFiles, that.numberOfFiles)
&& Objects.equals(jobId, that.jobId) && status == that.status;
}
@Override
public int hashCode()
{
return Objects.hash(id, sizeInBytes, calculatedAt, numberOfFiles, jobId, status);
}
@Override
public String toString()
{
return "NodeSizeDetails{" + "id='" + id + '\'' + ", sizeInBytes=" + sizeInBytes + ", calculatedAt="
+ calculatedAt + ", numberOfFiles=" + numberOfFiles + ", jobId='" + jobId + '\'' + ", status="
+ status + '}';
}
public enum STATUS
{
NOT_INITIATED, PENDING, IN_PROGRESS, COMPLETED, FAILED
}
}
}
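calculateTotalSizeFromFacet walks the content.size facet in pages: every facet bucket is a (file size, file count) pair, so the folder total is the sum of size multiplied by count across all buckets. A small, self-contained illustration of that arithmetic with invented sample values:

import java.util.List;

public class FacetSumSketch
{
    public static void main(String[] args)
    {
        // Each entry mirrors one facet bucket: {file size in bytes, number of files of that size}.
        List<long[]> buckets = List.of(
                new long[]{1024, 3},  // three 1 KiB files
                new long[]{5000, 2},  // two 5000-byte files
                new long[]{0, 10});   // ten empty files
        long totalBytes = buckets.stream()
                .mapToLong(bucket -> bucket[0] * bucket[1])
                .sum();
        System.out.println("Total size in bytes: " + totalBytes); // 3072 + 10000 + 0 = 13072
    }
}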

View File

@@ -514,4 +514,10 @@
<bean name="ldapInitialDirContextCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.ldapInitialDirContextCache"/>
</bean>
<!-- The shared cache for Folder Node -->
<bean name="folderSizeSharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.folderSizeSharedCache"/>
</bean>
</beans>

View File

@@ -163,7 +163,7 @@ cache.node.nodesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.node.nodesSharedCache.maxItems=250000
cache.node.nodesSharedCache.timeToLiveSeconds=300
cache.node.nodesSharedCache.maxIdleSeconds=0
cache.node.nodesSharedCache.cluster.type=invalidating
cache.node.nodesSharedCache.cluster.type=fully-distributed
cache.node.nodesSharedCache.backup-count=1
cache.node.nodesSharedCache.eviction-policy=LRU
cache.node.nodesSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
@@ -394,7 +394,7 @@ cache.loadedResourceBundlesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.loadedResourceBundlesSharedCache.maxItems=1000
cache.loadedResourceBundlesSharedCache.timeToLiveSeconds=0
cache.loadedResourceBundlesSharedCache.maxIdleSeconds=0
cache.loadedResourceBundlesSharedCache.cluster.type=invalidating
cache.loadedResourceBundlesSharedCache.cluster.type=local
cache.loadedResourceBundlesSharedCache.backup-count=1
cache.loadedResourceBundlesSharedCache.eviction-policy=LRU
cache.loadedResourceBundlesSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
@@ -405,7 +405,7 @@ cache.messagesSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.messagesSharedCache.maxItems=1000
cache.messagesSharedCache.timeToLiveSeconds=0
cache.messagesSharedCache.maxIdleSeconds=0
cache.messagesSharedCache.cluster.type=invalidating
cache.messagesSharedCache.cluster.type=local
cache.messagesSharedCache.backup-count=1
cache.messagesSharedCache.eviction-policy=LRU
cache.messagesSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
@@ -710,3 +710,12 @@ cache.ldapInitialDirContextCache.backup-count=1
cache.ldapInitialDirContextCache.eviction-policy=NONE
cache.ldapInitialDirContextCache.merge-policy=com.hazelcast.spi.merge.LatestUpdateMergePolicy
cache.ldapInitialDirContextCache.readBackupData=false
cache.folderSizeSharedCache.maxItems=1000
cache.folderSizeSharedCache.timeToLiveSeconds=300
cache.folderSizeSharedCache.maxIdleSeconds=0
cache.folderSizeSharedCache.cluster.type=fully-distributed
cache.folderSizeSharedCache.backup-count=1
cache.folderSizeSharedCache.eviction-policy=LRU
cache.folderSizeSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
cache.folderSizeSharedCache.readBackupData=false

View File

@@ -332,4 +332,40 @@
<property name="searchTrackingComponent" ref="searchTrackingComponent" />
</bean>
<bean id="nodeSizeThreadPool" class="org.alfresco.util.ThreadPoolExecutorFactoryBean">
<property name="poolName">
<value>defaultThreadPool</value>
</property>
<property name="corePoolSize">
<value>${default.nodeSize.corePoolSize}</value>
</property>
<property name="maximumPoolSize">
<value>${default.nodeSize.maximumPoolSize}</value>
</property>
<property name="workQueueSize">
<value>${default.nodeSize.workQueueSize}</value>
</property>
</bean>
<bean id="NodeSizeDetailsServiceImpl" class="org.alfresco.repo.node.sizedetails.NodeSizeDetailsServiceImpl">
<property name="searchService" ref="SearchService"/>
<property name="simpleCache" ref="folderSizeSharedCache" />
<property name="transactionService" ref="transactionService"/>
<property name="threadPoolExecutor">
<ref bean="nodeSizeThreadPool"/>
</property>
<property name="defaultItems" value="${default.async.folder.items}"/>
</bean>
<bean id="NodeSizeDetailsService" class="org.springframework.aop.framework.ProxyFactoryBean" >
<property name="targetName">
<value>NodeSizeDetailsServiceImpl</value>
</property>
<property name="proxyInterfaces">
<list>
<value>org.alfresco.repo.node.sizedetails.NodeSizeDetailsService</value>
</list>
</property>
</bean>
</beans>
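The ProxyFactoryBean above publishes the implementation under the public bean name NodeSizeDetailsService. A hedged sketch of resolving that proxy programmatically, assuming access to a Spring ApplicationContext (in practice the bean would normally be injected by name):

// Sketch only: looking up the proxied service and starting a size-details job.
NodeSizeDetailsService sizeDetailsService =
        applicationContext.getBean("NodeSizeDetailsService", NodeSizeDetailsService.class);
sizeDetailsService.invokeSizeDetailsExecutor(folderNodeRef, jobId);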

View File

@@ -3,7 +3,7 @@
repository.name=Main Repository
# Schema number
version.schema=19300
version.schema=20000
# Directory configuration
@@ -1385,3 +1385,12 @@ scripts.execution.maxMemoryUsedInBytes=-1
# Number of instructions that will trigger the observer
scripts.execution.observerInstructionCount=5000
# Default batch size used by the POST /size-details endpoint to partition a large folder into smaller chunks,
# so the size calculation can run efficiently and the partial results can be consolidated into a single total.
default.async.folder.items=1000
# Default NodeSize Thread pool
default.nodeSize.corePoolSize=5
default.nodeSize.maximumPoolSize=10
default.nodeSize.workQueueSize=100
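The new default.nodeSize.* values describe a bounded pool: 5 core threads, at most 10 threads, and a queue of 100 pending jobs. A rough java.util.concurrent equivalent, shown only to make the numbers concrete; the repository actually builds its pool through the nodeSizeThreadPool factory bean earlier in this change, and the 60-second keep-alive below is an assumption, since the properties do not define one:

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class NodeSizePoolSketch
{
    public static void main(String[] args)
    {
        // Approximation of default.nodeSize.corePoolSize=5, maximumPoolSize=10, workQueueSize=100.
        ThreadPoolExecutor nodeSizePool = new ThreadPoolExecutor(
                5, 10, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100));
        System.out.println("Core threads: " + nodeSizePool.getCorePoolSize());
        nodeSizePool.shutdown();
    }
}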

View File

@@ -25,12 +25,17 @@
*/
package org.alfresco.repo.cache.lookup;
import static org.junit.Assert.*;
import java.sql.Savepoint;
import java.util.Map;
import java.util.TreeMap;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.dao.DuplicateKeyException;
import org.alfresco.repo.cache.MemoryCache;
import org.alfresco.repo.cache.SimpleCache;
@@ -38,20 +43,16 @@ import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAO;
import org.alfresco.repo.domain.control.ControlDAO;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.Pair;
import org.mockito.Mockito;
import org.springframework.dao.DuplicateKeyException;
/**
* A cache for two-way lookups of database entities. These are characterized by having a unique
* key (perhaps a database ID) and a separate unique key that identifies the object.
* A cache for two-way lookups of database entities. These are characterized by having a unique key (perhaps a database ID) and a separate unique key that identifies the object.
* <p>
* The keys must have good <code>equals</code> and </code>hashCode</code> implementations and
* must respect the case-sensitivity of the use-case.
 * The keys must have good <code>equals</code> and <code>hashCode</code> implementations and must respect the case-sensitivity of the use-case.
*
* @author Derek Hulley
* @since 3.2
*/
public class EntityLookupCacheTest extends TestCase implements EntityLookupCallbackDAO<Long, Object, String>
public class EntityLookupCacheTest implements EntityLookupCallbackDAO<Long, Object, String>
{
SimpleCache<Long, Object> cache;
private EntityLookupCache<Long, Object, String> entityLookupCacheA;
@@ -59,7 +60,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
private TreeMap<Long, String> database;
private ControlDAO controlDAO;
@Override
@Before
protected void setUp() throws Exception
{
cache = new MemoryCache<Long, Object>();
@@ -71,6 +72,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
Mockito.when(controlDAO.createSavepoint(Mockito.anyString())).thenReturn(Mockito.mock(Savepoint.class));
}
@Test
public void testLookupsUsingIncorrectValue() throws Exception
{
try
@@ -84,6 +86,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
}
}
@Test
public void testLookupAgainstEmpty() throws Exception
{
TestValue value = new TestValue("AAA");
@@ -114,6 +117,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals("Looked-up type value incorrect", value, entityPair.getSecond());
}
@Test
public void testLookupAgainstExisting() throws Exception
{
// Put some values in the "database"
@@ -136,6 +140,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals("ID is incorrect", Long.valueOf(3), entityPair.getFirst());
}
@Test
public void testRegions() throws Exception
{
TestValue valueAAA = new TestValue("AAA");
@@ -157,6 +162,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(8, cache.getKeys().size());
}
@Test
public void testNullLookups() throws Exception
{
TestValue valueNull = null;
@@ -174,9 +180,10 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(entityPairNull, entityPairCheck);
}
@Test
public void testGetOrCreate() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
Pair<Long, Object> entityPairOne = entityLookupCacheA.getOrCreateByValue(valueOne);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
@@ -188,24 +195,27 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(id, entityPairOneCheck.getFirst());
}
@Test
public void testCreateOrGet() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
Pair<Long, Object> entityPairOne = entityLookupCacheA.createOrGetByValue(valueOne, controlDAO);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
assertEquals(valueOne.val, database.get(id));
assertEquals(1, cache.getKeys().size());
// We cache both by value and by key, so we should have 2 entries
assertEquals(2, cache.getKeys().size());
Pair<Long, Object> entityPairOneCheck = entityLookupCacheA.createOrGetByValue(valueOne, controlDAO);
assertNotNull(entityPairOneCheck);
assertEquals(id, entityPairOneCheck.getFirst());
}
@Test
public void testUpdate() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueTwo = new TestValue(getName() + "-TWO");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
TestValue valueTwo = new TestValue(getClass().getName() + "-TWO");
Pair<Long, Object> entityPairOne = entityLookupCacheA.getOrCreateByValue(valueOne);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
@@ -219,9 +229,10 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(2, cache.getKeys().size());
}
@Test
public void testDeleteByKey() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
Pair<Long, Object> entityPairOne = entityLookupCacheA.getOrCreateByValue(valueOne);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
@@ -235,9 +246,10 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(0, cache.getKeys().size());
}
@Test
public void testDeleteByValue() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
Pair<Long, Object> entityPairOne = entityLookupCacheA.getOrCreateByValue(valueOne);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
@@ -251,9 +263,10 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
assertEquals(0, cache.getKeys().size());
}
@Test
public void testClear() throws Exception
{
TestValue valueOne = new TestValue(getName() + "-ONE");
TestValue valueOne = new TestValue(getClass().getName() + "-ONE");
Pair<Long, Object> entityPairOne = entityLookupCacheA.getOrCreateByValue(valueOne);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
@@ -262,8 +275,32 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
// Clear it
entityLookupCacheA.clear();
assertEquals(valueOne.val, database.get(id)); // Must still be in database
assertEquals(0, cache.getKeys().size()); // ... but cache must be empty
assertEquals(valueOne.val, database.get(id)); // Must still be in database
assertEquals(0, cache.getKeys().size()); // ... but cache must be empty
}
@Test
public void testGetCachedValue() throws Exception
{
// Create a new value
TestValue valueCached = new TestValue(getClass().getName() + "-CACHED");
Pair<Long, Object> entityPairOne = entityLookupCacheA.createOrGetByValue(valueCached, controlDAO);
assertNotNull(entityPairOne);
Long id = entityPairOne.getFirst();
// We cache both by value and by key, so we should have 2 entries
assertEquals(2, cache.getKeys().size());
// Check the cache for the previously created value
Pair<Long, Object> entityPairCacheCheck = entityLookupCacheA.getCachedEntityByValue(valueCached);
assertNotNull(entityPairCacheCheck);
assertEquals(id, entityPairCacheCheck.getFirst());
// Clear the cache and attempt to retrieve it again
entityLookupCacheA.clear();
entityPairCacheCheck = entityLookupCacheA.getCachedEntityByValue(valueCached);
// Since we are only retrieving from cache, the value should not be found
assertNull(entityPairCacheCheck);
}
/**
@@ -272,10 +309,12 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
private static class TestValue
{
private final String val;
private TestValue(String val)
{
this.val = val;
}
@Override
public boolean equals(Object obj)
{
@@ -283,8 +322,9 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
{
return false;
}
return val.equals( ((TestValue)obj).val );
return val.equals(((TestValue) obj).val);
}
@Override
public int hashCode()
{
@@ -297,7 +337,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
{
assertNotNull(value);
assertTrue(value instanceof TestValue);
String dbValue = ((TestValue)value).val;
String dbValue = ((TestValue) value).val;
return dbValue;
}
@@ -318,7 +358,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
public Pair<Long, Object> findByValue(Object value)
{
assertTrue(value == null || value instanceof TestValue);
String dbValue = (value == null) ? null : ((TestValue)value).val;
String dbValue = (value == null) ? null : ((TestValue) value).val;
for (Map.Entry<Long, String> entry : database.entrySet())
{
@@ -336,7 +376,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
public Pair<Long, Object> createValue(Object value)
{
assertTrue(value == null || value instanceof TestValue);
String dbValue = (value == null) ? null : ((TestValue)value).val;
String dbValue = (value == null) ? null : ((TestValue) value).val;
// Kick out any duplicate values
if (database.containsValue(dbValue))
@@ -372,7 +412,7 @@ public class EntityLookupCacheTest extends TestCase implements EntityLookupCallb
}
else
{
database.put(key, ((TestValue)value).val);
database.put(key, ((TestValue) value).val);
return 1;
}
}

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2024 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -29,11 +29,12 @@ import static org.junit.Assert.assertEquals;
import java.util.UUID;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteVisibility;
import org.junit.Before;
import org.junit.Test;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteVisibility;
/**
* Test Membership constructor logs. Based on REPO-2520
*
@@ -56,10 +57,10 @@ public class SiteMembershipTest
@Before
public void createSite()
{
String sitePreset = "testSiteMembershipPreset";
String shortName = "testSiteMembershipShortName";
String title = "testSiteMembershipTile";
String description = "testSiteMembershipDescription";
var sitePreset = "testSiteMembershipPreset";
var shortName = "testSiteMembershipShortName";
var title = "testSiteMembershipTile";
var description = "testSiteMembershipDescription";
siteInfo = new SiteInfoImpl(sitePreset, shortName, title, description,
SiteVisibility.PUBLIC, null);
}

View File

@@ -59,7 +59,7 @@ services:
CLIENT_SSL_TRUST_STORE_TYPE: "JCEKS"
keycloak:
profiles: ["with-sso"]
image: quay.io/keycloak/keycloak:24.0.3
image: quay.io/keycloak/keycloak:25.0.6
environment:
- KEYCLOAK_ADMIN=admin
- KEYCLOAK_ADMIN_PASSWORD=admin