Compare commits


1 Commit

Author: Damian Ujma | SHA1: d31842352d | Message: Link [skip ci] | Date: 2025-02-10 12:51:00 +01:00
103 changed files with 11153 additions and 10854 deletions

View File

@@ -44,14 +44,14 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- id: changed-files
uses: Alfresco/alfresco-build-tools/.github/actions/github-list-changes@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/github-list-changes@v8.2.0
with:
write-list-to-env: true
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/pre-commit@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare maven cache and check compilation"
@@ -69,12 +69,12 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/veracode@v8.2.0
continue-on-error: true
with:
srcclr-api-token: ${{ secrets.SRCCLR_API_TOKEN }}
@@ -92,10 +92,10 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/github-download-file@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/github-download-file@v8.2.0
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
repository: "Alfresco/veracode-baseline-archive"
@@ -148,9 +148,9 @@ jobs:
!contains(github.event.head_commit.message, '[skip tests]') &&
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- uses: Alfresco/ya-pmd-scan@v4.1.0
with:
classpath-build-command: "mvn test-compile -ntp -Pags -pl \"-:alfresco-community-repo-docker\""
@@ -181,14 +181,14 @@ jobs:
testAttributes: "-Dtest=AllMmtUnitTestSuite"
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testModule }}
@@ -219,7 +219,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -261,9 +261,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -276,7 +276,7 @@ jobs:
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testSuite }}
@@ -307,7 +307,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -340,9 +340,9 @@ jobs:
version: ['10.5', '10.6']
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: Run MariaDB ${{ matrix.version }} database
@@ -351,7 +351,7 @@ jobs:
MARIADB_VERSION: ${{ matrix.version }}
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.version }}
@@ -382,7 +382,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -411,9 +411,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MariaDB 10.11 database"
@@ -422,7 +422,7 @@ jobs:
MARIADB_VERSION: 10.11
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -453,7 +453,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -482,9 +482,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run MySQL 8 database"
@@ -493,7 +493,7 @@ jobs:
MYSQL_VERSION: 8
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -515,7 +515,7 @@ jobs:
RP_OPTS: ${{ github.ref_name == 'master' && steps.rp-prepare.outputs.mvn-opts || '' }}
run: |
eval "args=($RP_OPTS)"
mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.cj.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "${args[@]}"
mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "${args[@]}"
continue-on-error: true
- name: "Update GitHub Step Summary"
if: github.ref_name == 'master'
@@ -524,7 +524,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -552,9 +552,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 14.15 database"
@@ -563,7 +563,7 @@ jobs:
POSTGRES_VERSION: 14.15
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -594,7 +594,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -622,9 +622,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 15.10 database"
@@ -633,7 +633,7 @@ jobs:
POSTGRES_VERSION: 15.10
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -664,7 +664,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -692,9 +692,9 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run PostgreSQL 16.6 database"
@@ -703,7 +703,7 @@ jobs:
POSTGRES_VERSION: 16.6
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -734,7 +734,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -760,16 +760,16 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run ActiveMQ"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile activemq up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -800,7 +800,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -860,9 +860,9 @@ jobs:
mvn-options: '-Dencryption.ssl.keystore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.keystore -Dencryption.ssl.truststore.location=${CI_WORKSPACE}/keystores/alfresco/alfresco.truststore'
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Set transformers tag"
@@ -885,7 +885,7 @@ jobs:
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.testSuite }} ${{ matrix.idp }}
@@ -916,7 +916,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -974,9 +974,9 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -992,7 +992,7 @@ jobs:
run: mvn install -pl :alfresco-community-repo-integration-test -am -DskipTests -Pall-tas-tests
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} - ${{ matrix.test-name }}
@@ -1030,7 +1030,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.tests.outcome }}
@@ -1056,16 +1056,16 @@ jobs:
!contains(github.event.head_commit.message, '[force')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- name: "Run Postgres 16.6 database"
run: docker compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile postgres up -d
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -1096,7 +1096,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -1130,9 +1130,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1140,7 +1140,7 @@ jobs:
bash ./scripts/ci/build.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} 0${{ matrix.part }} - (PostgreSQL) ${{ matrix.test-name }}
@@ -1176,9 +1176,9 @@ jobs:
REQUIRES_INSTALLED_ARTIFACTS: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1186,7 +1186,7 @@ jobs:
bash ./scripts/ci/build.sh
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }} 0${{ matrix.part }} - (MySQL) ${{ matrix.test-name }}
@@ -1218,9 +1218,9 @@ jobs:
REQUIRES_LOCAL_IMAGES: true
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |
@@ -1234,7 +1234,7 @@ jobs:
mvn -B install -pl :alfresco-governance-services-automation-community-rest-api -am -Pags -Pall-tas-tests -DskipTests
- name: "Prepare Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-prepare@v8.2.0
id: rp-prepare
with:
rp-launch-prefix: ${{ env.RP_LAUNCH_PREFIX }}
@@ -1266,7 +1266,7 @@ jobs:
continue-on-error: true
- name: "Summarize Report Portal"
if: github.ref_name == 'master'
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.13.0
uses: Alfresco/alfresco-build-tools/.github/actions/reportportal-summarize@v8.2.0
id: rp-summarize
with:
tests-outcome: ${{ steps.run-tests.outcome }}
@@ -1308,9 +1308,9 @@ jobs:
!contains(github.event.head_commit.message, '[force]')
steps:
- uses: actions/checkout@v4
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Build"
timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
run: |

View File

@@ -34,12 +34,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.2.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}
@@ -63,12 +63,12 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.2.0
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.2.0
- name: "Init"
run: bash ./scripts/ci/init.sh
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.13.0
- uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v8.2.0
with:
username: ${{ env.GIT_USERNAME }}
email: ${{ env.GIT_EMAIL }}

View File

@@ -1377,7 +1377,7 @@
"filename": "repository/src/test/java/org/alfresco/repo/imap/ImapMessageTest.java",
"hashed_secret": "d033e22ae348aeb5660fc2140aec35850c4da997",
"is_verified": false,
"line_number": 116,
"line_number": 118,
"is_secret": false
}
],
@@ -1431,6 +1431,26 @@
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 112,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 103,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/management/JmxDumpUtilTest.java": [
{
"type": "Secret Keyword",
@@ -1868,5 +1888,5 @@
}
]
},
"generated_at": "2025-02-26T15:13:52Z"
"generated_at": "2024-12-19T08:58:42Z"
}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<build>

View File

@@ -23,7 +23,7 @@ Recorded content can be explicitly destroyed whilst maintaining the original nod
* License: Alfresco Community
* Issue Tracker Link: [JIRA RM](https://issues.alfresco.com/jira/projects/RM/summary)
* Contribution Model: Alfresco Closed Source
* Documentation: [docs.alfresco.com (Records Management)](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services-Community-Edition/23.4/Alfresco-Governance-Services-Community-Edition/Introduction)
* Documentation: [docs.alfresco.com (Records Management)](http://docs.alfresco.com/rm2.4/concepts/welcome-rm.html)
***

View File

@@ -21,18 +21,18 @@ RM is split into two main parts - a repository integration and a Share integrati
* [Community License](../LICENSE.txt)
* [Enterprise License](../../rm-enterprise/LICENSE.txt) (this file will only be present in clones of the Enterprise repository)
* [Issue Tracker Link](https://issues.alfresco.com/jira/projects/RM)
* [Community Documentation Link](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services-Community-Edition/23.4/Alfresco-Governance-Services-Community-Edition/Introduction)
* [Enterprise Documentation Link](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services/23.4/Alfresco-Governance-Services/Introduction)
* [Community Documentation Link](http://docs.alfresco.com/rm-community/concepts/welcome-rm.html)
* [Enterprise Documentation Link](http://docs.alfresco.com/rm/concepts/welcome-rm.html)
* [Contribution Model](../../CONTRIBUTING.md)
***
### Prerequisite Knowledge
An understanding of Alfresco Content Services is assumed. The following pages from the [developer documentation](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services-Community-Edition/23.4/Alfresco-Content-Services-Community-Edition/Develop) give useful background information:
An understanding of Alfresco Content Services is assumed. The following pages from the [developer documentation](http://docs.alfresco.com/5.2/concepts/dev-for-developers.html) give useful background information:
* [ACS Architecture](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Software-Architecture)
* [Platform Extensions](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Extension-Points-Overview)
* [Share Extensions](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Share-UI-Extension-Points)
* [ACS Architecture](http://docs.alfresco.com/5.2/concepts/dev-arch-overview.html)
* [Platform Extensions](http://docs.alfresco.com/5.2/concepts/dev-platform-extensions.html)
* [Share Extensions](http://docs.alfresco.com/5.2/concepts/dev-extensions-share.html)
***
@@ -44,12 +44,12 @@ The RM Share module communicates with the repository module via REST APIs. Inter
* A DAO layer responsible for CRUD operations against the database.
#### REST API
The REST API endpoints fall into two main types - v0 (Webscripts) and v1. The [v0 API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Web-Scripts) is older and not recommended for integrations. The [v1 API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide) is newer but isn't yet feature complete. If you are running RM locally then the GS API Explorer will be available at [this link](http://localhost:8080/gs-api-explorer/).
The REST API endpoints fall into two main types - v0 (Webscripts) and v1. The [v0 API](http://docs.alfresco.com/5.2/references/dev-extension-points-webscripts.html) is older and not recommended for integrations. The [v1 API](http://docs.alfresco.com/5.1/pra/1/topics/pra-welcome-aara.html) is newer but isn't yet feature complete. If you are running RM locally then the GS API Explorer will be available at [this link](http://localhost:8080/gs-api-explorer/).
Internally the GS v1 REST API is built on the [Alfresco v1 REST API framework](https://community.alfresco.com/community/ecm/blog/2016/10/11/v1-rest-api-part-1-introduction). It aims to be consistent with this in terms of behaviour and naming.
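For readers unfamiliar with the v1 style, here is a minimal sketch of calling a v1 endpoint (an editorial illustration, not part of the change shown above). It assumes a local repository on port 8080 with the default `admin`/`admin` credentials, and uses the core `nodes/-root-` resource rather than a GS-specific endpoint.
```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class V1ApiSketch
{
    public static void main(String[] args) throws Exception
    {
        // Assumed local instance and default credentials; adjust for your environment.
        String auth = Base64.getEncoder().encodeToString("admin:admin".getBytes());
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1/nodes/-root-"))
                .header("Authorization", "Basic " + auth)
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode()); // 200, with a JSON "entry" describing the root node
        System.out.println(response.body());
    }
}
```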
#### Java Public API
The Java service layer is fronted by a [Java Public API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Reference/Java-Foundation-API), which we will ensure backward compatible with previous releases. Before we remove any methods there will first be a release containing that method deprecated to allow third party integrations to migrate to a new method. The Java Public API also includes a set of POJO objects which are needed to communicate with the services. It is easy to identify classes that are part of the Java Public API as they are annotated `@AlfrescoPublicApi`.
The Java service layer is fronted by a [Java Public API](http://docs.alfresco.com/5.2/concepts/java-public-api-list.html), which we will ensure backward compatible with previous releases. Before we remove any methods there will first be a release containing that method deprecated to allow third party integrations to migrate to a new method. The Java Public API also includes a set of POJO objects which are needed to communicate with the services. It is easy to identify classes that are part of the Java Public API as they are annotated `@AlfrescoPublicApi`.
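As an illustration of the annotation convention described above (again, not part of the change shown), a hypothetical service interface might look like the sketch below. The interface name and method are invented, and the annotation's package (`org.alfresco.api`) is an assumption to verify against the code base.
```java
import org.alfresco.api.AlfrescoPublicApi; // assumed package for the annotation
import org.alfresco.service.cmr.repository.NodeRef;

/**
 * Hypothetical example of a Java Public API service: the annotation marks the
 * interface as part of the supported, backward-compatible surface.
 */
@AlfrescoPublicApi
public interface ExampleRecordService
{
    /**
     * @deprecated kept for one release so third-party integrations can migrate,
     *             per the deprecation policy described above.
     */
    @Deprecated
    void declareRecord(String nodeRefId);

    void declareRecord(NodeRef nodeRef);
}
```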
Each Java service will have at least four beans defined for it:
@@ -61,7 +61,7 @@ Each Java service will have at least four beans defined for it:
#### DAOs
The DAOs are not part of the Java Public API, but handle CRUD operations against RM stored data. We have some custom queries to improve performance for particularly heavy operations.
We use standard Alfresco [data modelling](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Content-Model-Extension-Point) to store RM metadata. We extend the [Alfresco patching mechanism](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Patches) to provide community and enterprise schema upgrades.
We use standard Alfresco [data modelling](http://docs.alfresco.com/5.2/references/dev-extension-points-content-model.html) to store RM metadata. We extend the [Alfresco patching mechanism](http://docs.alfresco.com/5.2/references/dev-extension-points-patch.html) to provide community and enterprise schema upgrades.
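As a rough sketch of the patching mechanism mentioned above (editorial illustration, not part of the change shown): a schema upgrade is typically a class plugged into the repository's patch framework plus a Spring bean definition (omitted here). The base class and method names below (`AbstractPatch`, `applyInternal()`) are assumptions about that framework and should be checked against the actual code.
```java
// Hypothetical RM upgrade patch; the patch framework's class and method names are assumed.
public class ExampleRMSchemaPatch extends org.alfresco.repo.admin.patch.AbstractPatch
{
    @Override
    protected String applyInternal() throws Exception
    {
        // Perform the community/enterprise schema or metadata migration here,
        // e.g. adding a missing aspect to existing records.
        return "Example RM schema patch applied";
    }
}
```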
***

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.15
SOLR6_TAG=2.0.14
POSTGRES_TAG=16.6
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -5,7 +5,7 @@
# Version label
version.major=25
version.minor=1
version.revision=1
version.revision=0
version.label=
# Edition label

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -23,6 +23,10 @@ function runAction(p_params)
if (p_params.destNode.hasAspect("cm:lockable") && !p_params.destNode.hasAspect("trx:transferred"))
{
p_params.destNode.unlock();
if(p_params.destNode.hasAspect("gd2:editingInGoogle"))
{
p_params.destNode.removeAspect("gd2:editingInGoogle");
}
}
var resultId = originalDoc.name,

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<dependencies>
@@ -145,12 +145,6 @@
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${dependency.awaitility.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2025 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -18,9 +18,6 @@
*/
package org.alfresco.util;
import static org.awaitility.Awaitility.await;
import java.time.Duration;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
@@ -29,20 +26,20 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import junit.framework.TestCase;
/**
* Tests for our instance of {@link java.util.concurrent.ThreadPoolExecutor}
*
*
* @author Nick Burch
*/
public class DynamicallySizedThreadPoolExecutorTest extends TestCase
{
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(1);
private static final Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
private static Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
private static final int DEFAULT_KEEP_ALIVE_TIME = 90;
@Override
@@ -51,9 +48,9 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
SleepUntilAllWake.reset();
}
public void testUpToCore()
public void testUpToCore() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
assertEquals(0, exec.getPoolSize());
exec.execute(new SleepUntilAllWake());
@@ -64,15 +61,15 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
assertEquals(4, exec.getPoolSize());
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
SleepUntilAllWake.wakeAll();
waitForPoolSizeEquals(exec, 5);
Thread.sleep(100);
assertEquals(5, exec.getPoolSize());
}
public void testPastCoreButNotHugeQueue()
public void testPastCoreButNotHugeQueue() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
@@ -83,7 +80,7 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
// Need to hit max pool size before it adds more
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
@@ -92,20 +89,20 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(5, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(7, exec.getQueue().size());
SleepUntilAllWake.wakeAll();
waitForPoolSizeEquals(exec, 5);
Thread.sleep(100);
assertEquals(5, exec.getPoolSize());
}
public void testToExpandQueue() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(2, 4, 5);
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
@@ -113,39 +110,168 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// Next should add one
exec.execute(new SleepUntilAllWake());
waitForPoolSizeEquals(exec, 3); // Let the new thread spin up
Thread.sleep(20); // Let the new thread spin up
assertEquals(3, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// And again
exec.execute(new SleepUntilAllWake());
waitForPoolSizeEquals(exec, 4); // Let the new thread spin up
Thread.sleep(20); // Let the new thread spin up
assertEquals(4, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// But no more will be added, as we're at max
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
// All threads still running, as 5 second timeout
// All threads still running, as 1 second timeout
assertEquals(4, exec.getPoolSize());
}
public void offTestToExpandThenContract() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
exec.setKeepAliveTime(30, TimeUnit.MILLISECONDS);
assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// Next should add one
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
assertEquals(3, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// And again
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
assertEquals(4, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// But no more will be added, as we're at max
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
// Wait longer than the timeout without any work, which should
// let all the extra threads go away
// (Depending on how closely your JVM follows the specification,
// we may fall back to the core size which is correct, or we
// may go to zero which is wrong, but hey, it's the JVM...)
logger.debug("Core pool size is " + exec.getCorePoolSize());
logger.debug("Current pool size is " + exec.getPoolSize());
logger.debug("Queue size is " + exec.getQueue().size());
assertTrue(
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() >= 0
);
assertTrue(
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() <= 2
);
SleepUntilAllWake.reset();
// Add 2 new jobs, will stay/ go to at 2 threads
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
// Let the idle threads grab them, then check
Thread.sleep(20);
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
// 3 more, still at 2 threads
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
// And again wait for it all
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
assertEquals(2, exec.getPoolSize());
// Now decrease the overall pool size
// Will rise and fall to there now
exec.setCorePoolSize(1);
// Run a quick job, to ensure that the
// "can I kill one yet" logic is applied
SleepUntilAllWake.reset();
exec.execute(new SleepUntilAllWake());
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
assertEquals(1, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
SleepUntilAllWake.reset();
// Push enough on to go up to 4 active threads
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new threads spin up
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());
// Wait for them all to finish, should drop back to 1 now
// (Or zero, if your JVM can't read the specification...)
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
assertTrue(
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() >= 0
);
assertTrue(
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() <= 1
);
}
private DynamicallySizedThreadPoolExecutor createInstance(int corePoolSize, int maximumPoolSize, int keepAliveTime)
{
// We need a thread factory
@@ -165,11 +291,6 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
new ThreadPoolExecutor.CallerRunsPolicy());
}
private void waitForPoolSizeEquals(DynamicallySizedThreadPoolExecutor exec, int expectedSize)
{
await().atMost(MAX_WAIT_TIMEOUT).until(() -> exec.getPoolSize() == expectedSize);
}
public static class SleepUntilAllWake implements Runnable
{
private static ConcurrentMap<String, Thread> sleeping = new ConcurrentHashMap<String, Thread>();
@@ -178,33 +299,31 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
@Override
public void run()
{
if (allAwake)
return;
if(allAwake) return;
// Track us, and wait for the bang
logger.debug("Adding thread: " + Thread.currentThread().getName());
sleeping.put(Thread.currentThread().getName(), Thread.currentThread());
try
{
Thread.sleep(30 * 1000);
Thread.sleep(30*1000);
System.err.println("Warning - Thread finished sleeping without wake!");
}
catch (InterruptedException e)
catch(InterruptedException e)
{
logger.debug("Interrupted thread: " + Thread.currentThread().getName());
}
}
public static void wakeAll()
{
allAwake = true;
for (Entry<String, Thread> t : sleeping.entrySet())
for(Entry<String, Thread> t : sleeping.entrySet())
{
logger.debug("Interrupting thread: " + t.getKey());
t.getValue().interrupt();
}
}
public static void reset()
{
logger.debug("Resetting.");

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2025 Alfresco Software Limited.
* Copyright (C) 2005-2023 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -20,11 +20,13 @@ package org.alfresco.util.transaction;
import java.util.NoSuchElementException;
import java.util.Objects;
import jakarta.transaction.RollbackException;
import jakarta.transaction.Status;
import jakarta.transaction.UserTransaction;
import junit.framework.TestCase;
import org.springframework.transaction.CannotCreateTransactionException;
import org.springframework.transaction.NoTransactionException;
import org.springframework.transaction.TransactionDefinition;
@@ -33,20 +35,21 @@ import org.springframework.transaction.support.AbstractPlatformTransactionManage
import org.springframework.transaction.support.DefaultTransactionStatus;
/**
* @author Derek Hulley
* @see org.alfresco.util.transaction.SpringAwareUserTransaction
*
* @author Derek Hulley
*/
public class SpringAwareUserTransactionTest extends TestCase
{
private DummyTransactionManager transactionManager;
private FailingTransactionManager failingTransactionManager;
private UserTransaction txn;
public SpringAwareUserTransactionTest()
{
super();
}
@Override
protected void setUp() throws Exception
{
@@ -54,7 +57,7 @@ public class SpringAwareUserTransactionTest extends TestCase
failingTransactionManager = new FailingTransactionManager();
txn = getTxn();
}
private UserTransaction getTxn()
{
return new SpringAwareUserTransaction(
@@ -64,13 +67,13 @@ public class SpringAwareUserTransactionTest extends TestCase
TransactionDefinition.PROPAGATION_REQUIRED,
TransactionDefinition.TIMEOUT_DEFAULT);
}
public void testSetUp() throws Exception
{
assertNotNull(transactionManager);
assertNotNull(txn);
}
private void checkNoStatusOnThread()
{
try
@@ -83,7 +86,7 @@ public class SpringAwareUserTransactionTest extends TestCase
// expected
}
}
public void testNoTxnStatus() throws Exception
{
checkNoStatusOnThread();
@@ -131,7 +134,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}
public void testSimpleTxnWithRollback() throws Exception
{
testNoTxnStatus();
@@ -153,7 +156,7 @@ public class SpringAwareUserTransactionTest extends TestCase
transactionManager.getStatus());
checkNoStatusOnThread();
}
public void testNoBeginCommit() throws Exception
{
testNoTxnStatus();
@@ -168,7 +171,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}
public void testPostRollbackCommitDetection() throws Exception
{
testNoTxnStatus();
@@ -186,7 +189,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}
public void testPostSetRollbackOnlyCommitDetection() throws Exception
{
testNoTxnStatus();
@@ -205,7 +208,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}
public void testMismatchedBeginCommit() throws Exception
{
UserTransaction txn1 = getTxn();
@@ -215,18 +218,18 @@ public class SpringAwareUserTransactionTest extends TestCase
txn1.begin();
txn2.begin();
txn2.commit();
txn1.commit();
checkNoStatusOnThread();
txn1 = getTxn();
txn2 = getTxn();
txn1.begin();
txn2.begin();
try
{
txn1.commit();
@@ -242,6 +245,58 @@ public class SpringAwareUserTransactionTest extends TestCase
checkNoStatusOnThread();
}
/**
* Test for leaked transactions (no guarantee it will succeed due to reliance
* on garbage collector), so disabled by default.
*
* Also, if it succeeds, transaction call stack tracing will be enabled
* potentially hitting the performance of all subsequent tests.
*
* @throws Exception
*/
public void xtestLeakedTransactionLogging() throws Exception
{
assertFalse(SpringAwareUserTransaction.isCallStackTraced());
TrxThread t1 = new TrxThread();
t1.start();
System.gc();
Thread.sleep(1000);
TrxThread t2 = new TrxThread();
t2.start();
System.gc();
Thread.sleep(1000);
assertTrue(SpringAwareUserTransaction.isCallStackTraced());
TrxThread t3 = new TrxThread();
t3.start();
System.gc();
Thread.sleep(3000);
System.gc();
Thread.sleep(3000);
}
private class TrxThread extends Thread
{
public void run()
{
try
{
getTrx();
}
catch (Exception e) {}
}
public void getTrx() throws Exception
{
UserTransaction txn = getTxn();
txn.begin();
txn = null;
}
}
public void testConnectionPoolException() throws Exception
{
testNoTxnStatus();
@@ -256,7 +311,7 @@ public class SpringAwareUserTransactionTest extends TestCase
// Expected fail
}
}
private UserTransaction getFailingTxn()
{
return new SpringAwareUserTransaction(
@@ -266,7 +321,7 @@ public class SpringAwareUserTransactionTest extends TestCase
TransactionDefinition.PROPAGATION_REQUIRED,
TransactionDefinition.TIMEOUT_DEFAULT);
}
public void testTransactionListenerOrder() throws Throwable
{
testNoTxnStatus();
@@ -305,12 +360,12 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}
private static class TestTransactionListener extends TransactionListenerAdapter
{
private final String name;
private final StringBuffer buffer;
public TestTransactionListener(String name, StringBuffer buffer)
{
Objects.requireNonNull(name);
@@ -318,18 +373,18 @@ public class SpringAwareUserTransactionTest extends TestCase
this.name = name;
this.buffer = buffer;
}
@Override
public void beforeCommit(boolean readOnly)
{
buffer.append(name);
}
public String getName()
{
return name;
}
@Override
public boolean equals(Object obj)
{
@@ -339,17 +394,17 @@ public class SpringAwareUserTransactionTest extends TestCase
}
return false;
}
@Override
public int hashCode()
{
return name.hashCode();
}
}
/**
* Used to check that the transaction manager is being called correctly
*
*
* @author Derek Hulley
*/
@SuppressWarnings("serial")
@@ -357,7 +412,7 @@ public class SpringAwareUserTransactionTest extends TestCase
{
private int status = Status.STATUS_NO_TRANSACTION;
private Object txn = new Object();
/**
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
*/
@@ -386,10 +441,10 @@ public class SpringAwareUserTransactionTest extends TestCase
status = Status.STATUS_ROLLEDBACK;
}
}
/**
* Throws {@link NoSuchElementException} on begin()
*
*
* @author alex.mukha
*/
private static class FailingTransactionManager extends AbstractPlatformTransactionManager
@@ -397,7 +452,7 @@ public class SpringAwareUserTransactionTest extends TestCase
private static final long serialVersionUID = 1L;
private int status = Status.STATUS_NO_TRANSACTION;
private Object txn = new Object();
/**
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
*/

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
</project>

View File

@@ -1,5 +1,5 @@
# More infos about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
FROM alfresco/alfresco-base-tomcat:tomcat10-jre17-rockylinux9@sha256:9622418e142fb4fe1c5320666ad61ea292bc5c98f3dd0b550b6add33d18f659f
FROM alfresco/alfresco-base-tomcat:tomcat10-jre17-rockylinux9@sha256:395664f9d9be0c9f73d3b722a58fd559ee7231609b263dfe19502617652740e3
# Set default docker_context.
ARG resource_path=target

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,3 +1,3 @@
SOLR6_TAG=2.0.15
SOLR6_TAG=2.0.14
POSTGRES_TAG=16.6
ACTIVEMQ_TAG=5.18.3-jre17-rockylinux8

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -27,7 +27,7 @@
## Synopsis
**TAS** (**T**est **A**utomation **S**ystem) - **CMIS** is the project that handles the automated tests related only to the CMIS API integrated with Alfresco One [Alfresco CMIS API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Reference/CMIS-API).
**TAS** (**T**est **A**utomation **S**ystem) - **CMIS** is the project that handles the automated tests related only to the CMIS API integrated with Alfresco One [Alfresco CMIS API](http://docs.alfresco.com/5.1/pra/1/topics/cmis-welcome.html).
It is based on Apache Maven, is compatible with major IDEs, and also uses Spring capabilities for dependency injection.

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<organization>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -27,7 +27,7 @@ Back to [TAS Master Documentation](https://git.alfresco.com/tas/alfresco-tas-uti
## Synopsis
**TAS** (**T**est **A**utomation **S**ystem) - **RESTAPI** is the project that handles the automated tests related only to the [Alfresco REST API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide).
**TAS** (**T**est **A**utomation **S**ystem) - **RESTAPI** is the project that handles the automated tests related only to the [Alfresco REST API](http://docs.alfresco.com/5.1/pra/1/topics/pra-welcome.html).
It is based on Apache Maven, is compatible with major IDEs, and also uses Spring capabilities for dependency injection.
@@ -271,7 +271,7 @@ restClient.onResponse().assertThat().body("entry.modifiedBy.firstName", org.hamc
### How to generate models or check coverage
There are some simple generators that can parse [Swagger YAML](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide/Things-to-Know-Before-You-Start/The-API-Explorer-is-Your-Source-of-Truth) files and provide some useful information, such as:
There are some simple generators that can parse [Swagger YAML](http://docs.alfresco.com/community/concepts/alfresco-sdk-tutorials-using-rest-api-explorer.html) files and provide some useful information, such as:
a) Showing on screen the actual coverage of TAS vs. the requests that exist in each YAML file (defined in pom.xml)
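Purely as an editorial illustration of what such a generator does (not part of this changeset or the TAS codebase): the sketch below lists the request paths declared in a Swagger/OpenAPI YAML file, which is the raw information a coverage check compares against the implemented tests. It assumes org.yaml:snakeyaml is on the classpath; the file name core-api.yaml is hypothetical.

```java
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Map;

import org.yaml.snakeyaml.Yaml;

public class ListSwaggerPaths
{
    public static void main(String[] args) throws Exception
    {
        try (InputStream in = new FileInputStream("core-api.yaml"))
        {
            // Swagger/OpenAPI documents keep all endpoints under the top-level "paths" key.
            Object loaded = new Yaml().load(in);
            @SuppressWarnings("unchecked")
            Map<String, Object> paths = (Map<String, Object>) ((Map<String, Object>) loaded).get("paths");

            // Each key is an endpoint; each value maps HTTP verbs to operation definitions.
            paths.forEach((path, ops) -> System.out.println(path + " -> " + ops));
        }
    }
}
```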

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -1,49 +0,0 @@
/*-
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.alfresco.utility.model.TestModel;
/**
* Authorization code implementation
*/
public class RestAuthCodeModel extends TestModel
{
@JsonProperty
private String authorizationCode;
public String getAuthorizationCode()
{
return authorizationCode;
}
public void setAuthorizationCode(String authorizationCode)
{
this.authorizationCode = authorizationCode;
}
}

View File

@@ -1,49 +0,0 @@
/*-
* #%L
* alfresco-tas-restapi
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.alfresco.utility.model.TestModel;
/**
* Authorization key implementation
*/
public class RestAuthKeyModel extends TestModel
{
@JsonProperty(required = true)
private String authorizationKey;
public String getAuthorizationKey()
{
return authorizationKey;
}
public void setAuthorizationKey(String authorizationKey)
{
this.authorizationKey = authorizationKey;
}
}

View File

@@ -42,8 +42,6 @@ import org.alfresco.rest.core.RestWrapper;
import org.alfresco.rest.exception.EmptyJsonResponseException;
import org.alfresco.rest.exception.JsonToModelConversionException;
import org.alfresco.rest.model.RestActivityModelsCollection;
import org.alfresco.rest.model.RestAuthCodeModel;
import org.alfresco.rest.model.RestAuthKeyModel;
import org.alfresco.rest.model.RestFavoriteSiteModel;
import org.alfresco.rest.model.RestGroupsModelsCollection;
import org.alfresco.rest.model.RestNetworkModel;
@@ -448,31 +446,6 @@ public class People extends ModelRequest<People>
restWrapper.processEmptyModel(request);
}
/**
* Reauthorizes a user.
*/
public void reauthorizeUser(RestAuthKeyModel authKey)
{
var request = RestRequest.requestWithBody(HttpMethod.POST, authKey.toJson(), "people/{personId}/reauthorize", this.person.getUsername());
restWrapper.processEmptyModel(request);
}
/**
* Get the reauthorization code.
*/
public RestAuthCodeModel getReauthorizationCode()
{
var request = RestRequest.simpleRequest(HttpMethod.POST, "people/{personId}/reauthorization-code", this.person.getUsername());
try
{
return restWrapper.processModel(RestAuthCodeModel.class, request);
}
catch (JsonToModelConversionException | EmptyJsonResponseException e)
{
return null;
}
}
/**
* Update avatar image PUT call on 'people/{nodeId}/children
*/

View File

@@ -1,65 +0,0 @@
package org.alfresco.rest.people.deauthorization.community;
import org.springframework.http.HttpStatus;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.alfresco.rest.RestTest;
import org.alfresco.rest.model.RestAuthKeyModel;
import org.alfresco.utility.model.TestGroup;
import org.alfresco.utility.model.UserModel;
import org.alfresco.utility.testrail.ExecutionType;
import org.alfresco.utility.testrail.annotation.TestRail;
/**
* Verifies API behavior in community edition. Should be excluded in enterprise edition.
*/
@Test
public class ReauthorizeSanityTests extends RestTest
{
private UserModel userModel;
private UserModel adminUser;
@BeforeClass(alwaysRun = true)
public void dataPreparation()
{
adminUser = dataUser.getAdminUser();
userModel = dataUser.createRandomTestUser();
}
@Test(groups = {TestGroup.REST_API, TestGroup.PEOPLE, TestGroup.SANITY})
@TestRail(section = {TestGroup.REST_API, TestGroup.PEOPLE}, executionType = ExecutionType.SANITY,
description = "Check if reauthorization is not implemented in Community Edition")
public void reauthorizationIsNotImplementedInCommunityEdition()
{
// given
var key = new RestAuthKeyModel();
key.setAuthorizationKey("am9obnRlc3RAMTIzNDU=");
// when admin invokes API
restClient.authenticateUser(adminUser).withCoreAPI().usingUser(userModel).reauthorizeUser(key);
// then
restClient.assertStatusCodeIs(HttpStatus.NOT_IMPLEMENTED);
// when user invokes API
restClient.authenticateUser(userModel).withCoreAPI().usingUser(userModel).reauthorizeUser(key);
// then
restClient.assertStatusCodeIs(HttpStatus.NOT_IMPLEMENTED);
}
@Test(groups = {TestGroup.REST_API, TestGroup.PEOPLE, TestGroup.SANITY})
@TestRail(section = {TestGroup.REST_API, TestGroup.PEOPLE}, executionType = ExecutionType.SANITY,
description = "Check if the reauthorization code is not implemented in Community Edition")
public void reauthorizationCodeIsNotImplementedInCommunityEdition()
{
// when admin invokes API
restClient.authenticateUser(adminUser).withCoreAPI().usingUser(userModel).getReauthorizationCode();
// then
restClient.assertStatusCodeIs(HttpStatus.NOT_IMPLEMENTED);
// when user invokes API
restClient.authenticateUser(userModel).withCoreAPI().usingUser(userModel).getReauthorizationCode();
// then
restClient.assertStatusCodeIs(HttpStatus.NOT_IMPLEMENTED);
}
}

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -74,7 +74,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
<div class="index-list">
<h4><%=descriptorService.getServerDescriptor().getEdition()%></h4>
<p></p>
<p><a href="https://support.hyland.com/p/alfresco">Online Documentation</a></p>
<p><a href="http://docs.alfresco.com/">Online Documentation</a></p>
<p></p>
<%
if (shareServicesModule != null && ModuleInstallState.INSTALLED.equals(shareServicesModule.getInstallState()))

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -25,7 +25,7 @@
<properties>
<acs.version.major>25</acs.version.major>
<acs.version.minor>1</acs.version.minor>
<acs.version.revision>1</acs.version.revision>
<acs.version.revision>0</acs.version.revision>
<acs.version.label />
<amp.min.version>${acs.version.major}.0.0</amp.min.version>
@@ -48,17 +48,17 @@
<dependency.alfresco-hb-data-sender.version>1.1.1</dependency.alfresco-hb-data-sender.version>
<dependency.alfresco-trashcan-cleaner.version>2.4.2</dependency.alfresco-trashcan-cleaner.version>
<dependency.alfresco-jlan.version>7.5</dependency.alfresco-jlan.version>
<dependency.alfresco-server-root.version>7.0.2</dependency.alfresco-server-root.version>
<dependency.alfresco-server-root.version>7.0.1</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.1.7</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.1.7</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.1.6</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.1.6</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.1</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>1.0.2</dependency.acs-event-model.version>
<dependency.aspectj.version>1.9.22.1</dependency.aspectj.version>
<dependency.spring.version>6.2.2</dependency.spring.version>
<dependency.spring-security.version>6.3.7</dependency.spring-security.version>
<dependency.spring.version>6.2.1</dependency.spring.version>
<dependency.spring-security.version>6.3.4</dependency.spring-security.version>
<dependency.antlr.version>3.5.3</dependency.antlr.version>
<dependency.jackson.version>2.17.2</dependency.jackson.version>
<dependency.cxf.version>4.1.0</dependency.cxf.version>
@@ -86,7 +86,7 @@
<dependency.poi.version>5.3.0</dependency.poi.version>
<dependency.jboss.logging.version>3.5.0.Final</dependency.jboss.logging.version>
<dependency.camel.version>4.6.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
<dependency.netty.version>4.1.118.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.netty.version>4.1.117.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.6</dependency.activemq.version>
<dependency.apache-compress.version>1.27.1</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.2</dependency.awaitility.version>
@@ -112,10 +112,10 @@
<dependency.jakarta-ee-json-api.version>2.1.3</dependency.jakarta-ee-json-api.version>
<dependency.jakarta-ee-json-impl.version>1.1.7</dependency.jakarta-ee-json-impl.version>
<dependency.jakarta-json-path.version>2.9.0</dependency.jakarta-json-path.version>
<dependency.json-smart.version>2.5.2</dependency.json-smart.version>
<dependency.json-smart.version>2.5.1</dependency.json-smart.version>
<alfresco.googledrive.version>4.1.0</alfresco.googledrive.version>
<alfresco.aos-module.version>3.2.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>25.1.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.api-explorer.version>25.1.0-A1</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
<license-maven-plugin.version>2.4.0</license-maven-plugin.version>
@@ -155,7 +155,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>25.1.1.2</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -1,32 +0,0 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.model;
/**
* An object representing user authorization code.
*/
public record AuthCode(String authorizationCode)
{}

View File

@@ -1,43 +0,0 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.model;
import static org.apache.commons.lang3.StringUtils.length;
/**
* An object representing user authorization key request body.
*/
public record AuthKey(String authorizationKey)
{
@Override
public String toString()
{
// for security reasons the key content should never be logged
return "AuthKey[" +
"authorizationKeyLength=" + length(authorizationKey) +
']';
}
}

View File

@@ -34,8 +34,6 @@ import org.springframework.beans.factory.InitializingBean;
import org.alfresco.model.ContentModel;
import org.alfresco.rest.api.People;
import org.alfresco.rest.api.model.AuthCode;
import org.alfresco.rest.api.model.AuthKey;
import org.alfresco.rest.api.model.Client;
import org.alfresco.rest.api.model.PasswordReset;
import org.alfresco.rest.api.model.Person;
@@ -247,29 +245,4 @@ public class PeopleEntityResource implements EntityResourceAction.ReadById<Perso
{
// functionality is not implemented in community edition
}
/**
* Get the authorization code.
*
* Not supported in community edition.
*/
@Operation("reauthorization-code")
@WebApiDescription(title = "Get the reauthorization code", description = "Get the reauthorization code", successStatus = HttpServletResponse.SC_NOT_IMPLEMENTED)
public AuthCode getReauthorizationCode(String personId, Void body, Parameters parameters, WithResponse withResponse)
{
// functionality is not implemented in community edition
return null;
}
/**
* Reauthorize user.
*
* Not supported in community edition.
*/
@Operation("reauthorize")
@WebApiDescription(title = "Reauthorize user", description = "Performs user reauthorization", successStatus = HttpServletResponse.SC_NOT_IMPLEMENTED)
public void reauthorizeUser(String personId, AuthKey authKey, Parameters parameters, WithResponse withResponse)
{
// functionality is not implemented in community edition
}
}

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin Console
admin-console.help=Help
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Successfully saved values.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Konzole pro spr\u00e1vce
admin-console.help=N\u00e1pov\u011bda
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Hodnoty byly \u00fasp\u011b\u0161n\u011b ulo\u017eeny.
admin-console.host=Hostitel

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Administrationskonsol
admin-console.help=Hj\u00e6lp
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=V\u00e6rdierne blev gemt.
admin-console.host=V\u00e6rt

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Administratorkonsole
admin-console.help=Hilfe
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Erfolgreich gespeicherte Werte.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Consola de administraci\u00f3n
admin-console.help=Ayuda
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Valores guardados correctamente.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Hallintakonsoli
admin-console.help=Ohje
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Arvot tallennettiin.
admin-console.host=Is\u00e4nt\u00e4

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console d'administration
admin-console.help=Aide
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Les valeurs ont bien \u00e9t\u00e9 enregistr\u00e9es.
admin-console.host=H\u00f4te

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console di amministrazione
admin-console.help=Aiuto
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=I valori sono stati salvati.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u7ba1\u7406\u30b3\u30f3\u30bd\u30fc\u30eb
admin-console.help=\u30d8\u30eb\u30d7
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=\u5024\u3092\u6b63\u5e38\u306b\u4fdd\u5b58\u3057\u307e\u3057\u305f\u3002
admin-console.host=\u30db\u30b9\u30c8

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin-konsoll
admin-console.help=Hjelp
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Verdier som ble lagret.
admin-console.host=Vert

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Beheerconsole
admin-console.help=Help
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Waarden zijn opgeslagen.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Konsola administracyjna
admin-console.help=Pomoc
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Warto\u015bci zosta\u0142y zapisane pomy\u015blnie.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console de administra\u00e7\u00e3o
admin-console.help=Ajuda
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=Valores salvos com sucesso.
admin-console.host=Host

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u041a\u043e\u043d\u0441\u043e\u043b\u044c \u0430\u0434\u043c\u0438\u043d\u0438\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u0430
admin-console.help=\u0421\u043f\u0440\u0430\u0432\u043a\u0430
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=\u0423\u0441\u043f\u0435\u0448\u043d\u043e \u0441\u043e\u0445\u0440\u0430\u043d\u0435\u043d\u043d\u044b\u0435 \u0437\u043d\u0430\u0447\u0435\u043d\u0438\u044f.
admin-console.host=\u0425\u043e\u0441\u0442

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin-konsol
admin-console.help=Hj\u00e4lp
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=V\u00e4rden sparades.
admin-console.host=V\u00e4rd

View File

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u7ba1\u7406\u63a7\u5236\u53f0
admin-console.help=\u5e2e\u52a9
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.success=\u5df2\u6210\u529f\u4fdd\u5b58\u7684\u503c\u3002
admin-console.host=\u4e3b\u673a

View File

@@ -12,9 +12,9 @@
<#macro page title readonly=false controller=DEFAULT_CONTROLLER!"/admin" params="" dialog=false>
<#assign FORM_ID="admin-jmx-form" />
<#if server.edition == "Community">
<#assign docsEdition = "/Alfresco-Content-Services-Community-Edition/" + server.getVersionMajor() + "." + server.getVersionMinor() + "/Alfresco-Content-Services-Community-Edition" />
<#assign docsEdition = "community" />
<#elseif server.edition == "Enterprise" >
<#assign docsEdition = "/Alfresco-Content-Services/" + server.getVersionMajor() + "." + server.getVersionMinor() + "/Alfresco-Content-Services" />
<#assign docsEdition = server.getVersionMajor() + "." + server.getVersionMinor() />
</#if>
<#if metadata??>
<#assign HOSTNAME>${msg("admin-console.host")}: ${metadata.hostname}</#assign>
@@ -551,7 +551,7 @@ Admin.addEventListener(window, 'load', function() {
Template for a full page view
-->
<div class="sticky-wrapper">
<div class="header">
<span><a href="${url.serviceContext}${DEFAULT_CONTROLLER!"/admin"}">${msg("admin-console.header")}</a></span><#if metadata??><span class="meta">${HOSTNAME}</span><span class="meta">${HOSTADDR}</span></#if>
<div style="float:right"><a href="${msg("admin-console.help-link", docsEdition)}" target="_blank">${msg("admin-console.help")}</a></div>
@@ -908,4 +908,4 @@ Admin.addEventListener(window, 'load', function() {
<#macro button label description="" onclick="" style="" id="" class="" disabled="false">
<input class="<#if class?has_content>${class?html}<#else>inline</#if>" <#if id?has_content>id="${id?html}"</#if> <#if style?has_content>style="${style?html}"</#if> type="button" value="${label?html}" onclick="${onclick?html}" <#if disabled="true">disabled="true"</#if> />
<#if description?has_content><span class="description">${description?html}</span></#if>
</#macro>
</#macro>

View File

@@ -27,7 +27,7 @@ to integrate with a number of external Authentication providers including
* https://github.com/Alfresco/alfresco-data-model/tree/master/src/main/java/org/alfresco/repo/security/authentication
* License: LGPL
* Issue Tracker Link: https://issues.alfresco.com/jira/issues/?jql=project%3DREPO
* Documentation Link: https://support.hyland.com/r/Alfresco/Alfresco-Content-Services-Community-Edition/23.4/Alfresco-Content-Services-Community-Edition/Administer/Manage-Security/Authentication-and-sync
* Documentation Link: http://docs.alfresco.com/5.2/concepts/auth-intro.html
* Contribution Model: Alfresco Open Source
***

View File

@@ -16,7 +16,7 @@
* Source Code Link: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/
* License: LGPL
* Issue Tracker Link: https://issues.alfresco.com/jira/secure/RapidBoard.jspa?projectKey=REPO&useStoredSettings=true&rapidView=379
* Documentation Link: https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Configure/Repository/About-Versioning
* Documentation Link: http://docs.alfresco.com/5.1/concepts/versioning.html
* Contribution Model: Alfresco publishes the source code and will review proposed patch requests
***

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>25.1.1.2</version>
<version>25.1.0.46-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -542,7 +542,10 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
@Override
protected void onShutdown(ApplicationEvent applicationEvent)
{
// NOOP
if (eventSender != null)
{
eventSender.destroy();
}
}
protected class EventTransactionListener extends TransactionListenerAdapter
@@ -816,4 +819,4 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
return peerAssocs;
}
}
}
}

View File

@@ -52,7 +52,7 @@ public interface EventSender
}
/**
* It's called when the bean instance is destroyed, allowing implementations to perform cleanup operations.
* It's called when the application context is closing, allowing {@link org.alfresco.repo.event2.EventGenerator} to perform cleanup operations.
*/
default void destroy()
{
@@ -63,4 +63,4 @@ public interface EventSender
{
return false;
}
}
}
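The destroy() hook documented in the hunk above is a default no-op; a sender that owns resources (thread pools, connections) is expected to override it so the owning component can release them at shutdown, which is what the EventGenerator.onShutdown change earlier in this diff relies on. A minimal standalone sketch of that pattern, not the Alfresco API itself (every name except destroy() is illustrative):

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Illustrative interface mirroring the default-method pattern shown in the diff.
interface SimpleSender
{
    void send(String event);

    default void destroy()
    {
        // no-op by default; implementations owning resources override this
    }
}

class PooledSender implements SimpleSender
{
    private final ExecutorService pool = Executors.newFixedThreadPool(2);

    @Override
    public void send(String event)
    {
        pool.submit(() -> System.out.println("sending " + event));
    }

    @Override
    public void destroy()
    {
        // called once when the owning component shuts down
        pool.shutdown();
        try
        {
            pool.awaitTermination(5, TimeUnit.SECONDS);
        }
        catch (InterruptedException e)
        {
            Thread.currentThread().interrupt();
        }
    }
}
```

A container would call destroy() exactly once during shutdown, mirroring the destroyInstance()/onShutdown wiring shown elsewhere in this diff.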

View File

@@ -25,16 +25,15 @@
*/
package org.alfresco.repo.event2;
import java.util.Optional;
import java.util.concurrent.Executor;
import jakarta.annotation.Nonnull;
import org.alfresco.util.PropertyCheck;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.core.env.PropertyResolver;
import org.alfresco.util.PropertyCheck;
import java.util.Optional;
import java.util.concurrent.Executor;
public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
{
@@ -52,7 +51,7 @@ public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
private boolean legacySkipQueueConfig;
public EventSenderFactoryBean(@Autowired PropertyResolver propertyResolver, Event2MessageProducer event2MessageProducer,
Executor enqueueThreadPoolExecutor, Executor dequeueThreadPoolExecutor)
Executor enqueueThreadPoolExecutor, Executor dequeueThreadPoolExecutor)
{
super();
PropertyCheck.mandatory(this, "propertyResolver", propertyResolver);
@@ -156,13 +155,4 @@ public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
{
return event2MessageProducer;
}
@Override
protected void destroyInstance(EventSender eventSender)
{
if (eventSender != null)
{
eventSender.destroy();
}
}
}
}

View File

@@ -37,7 +37,6 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.transform.config.CoreFunction;
import org.alfresco.util.PropertyCheck;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
@@ -47,7 +46,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.transform.common.RequestParamMap.DIRECT_ACCESS_URL;
@@ -70,7 +68,6 @@ public class LocalTransformClient implements TransformClient, InitializingBean
private ContentService contentService;
private RenditionService2Impl renditionService2;
private boolean directAccessUrlEnabled;
private int threadPoolSize;
private ExecutorService executorService;
private ThreadLocal<LocalTransform> transform = new ThreadLocal<>();
@@ -100,11 +97,6 @@ public class LocalTransformClient implements TransformClient, InitializingBean
this.directAccessUrlEnabled = directAccessUrlEnabled;
}
public void setThreadPoolSize(int threadPoolSize)
{
this.threadPoolSize = threadPoolSize;
}
public void setExecutorService(ExecutorService executorService)
{
this.executorService = executorService;
@@ -118,11 +110,9 @@ public class LocalTransformClient implements TransformClient, InitializingBean
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "renditionService2", renditionService2);
PropertyCheck.mandatory(this, "directAccessUrlEnabled", directAccessUrlEnabled);
PropertyCheck.mandatory(this, "threadPoolSize", threadPoolSize);
if (executorService == null)
{
ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat("local-transform-%d").build();
executorService = Executors.newFixedThreadPool(threadPoolSize, threadFactory);
executorService = Executors.newCachedThreadPool();
}
}
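For context on the executor swap in this hunk: a fixed pool bounds concurrency at the configured size (the removed local.transform.threadPoolSize default was 8), while Executors.newCachedThreadPool() grows on demand and reclaims threads that have been idle for 60 seconds. A standalone JDK-only comparison, illustrative rather than the actual LocalTransformClient wiring:

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PoolComparison
{
    public static void main(String[] args)
    {
        // Bounded: at most 8 tasks run concurrently; further submissions queue up.
        ExecutorService fixed = Executors.newFixedThreadPool(8);

        // Unbounded: a new thread is created whenever no idle one is available;
        // idle threads are reclaimed after 60 seconds.
        ExecutorService cached = Executors.newCachedThreadPool();

        fixed.submit(() -> System.out.println("fixed pool task"));
        cached.submit(() -> System.out.println("cached pool task"));

        fixed.shutdown();
        cached.shutdown();
    }
}
```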

View File

@@ -28,17 +28,11 @@ package org.alfresco.repo.workflow.activiti.script;
import java.util.Map;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.delegate.VariableScope;
import org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.activiti.engine.impl.context.Context;
import org.activiti.engine.impl.el.Expression;
import org.activiti.engine.impl.persistence.entity.DeploymentEntity;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.repository.ProcessDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.workflow.WorkflowDeployer;
@@ -51,12 +45,13 @@ import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.workflow.WorkflowException;
/**
* Base class for execution scripts, using {@link ScriptService} as part of activiti workflow.
* Base class for execution scripts, using {@link ScriptService} as part of
* activiti workflow.
*
* @author Frederik Heremans
* @since 3.4.e
*/
public class ActivitiScriptBase
public class ActivitiScriptBase
{
protected static final String PERSON_BINDING_NAME = "person";
protected static final String USERHOME_BINDING_NAME = "userhome";
@@ -66,19 +61,17 @@ public class ActivitiScriptBase
protected Expression runAs;
protected Expression scriptProcessor;
private static final Logger LOGGER = LoggerFactory.getLogger(ActivitiScriptBase.class);
protected Object executeScript(String theScript, Map<String, Object> model, String scriptProcessorName, String runAsUser)
{
String user = AuthenticationUtil.getFullyAuthenticatedUser();
Object scriptResult = null;
if (runAsUser == null && user != null)
{
// Just execute the script using the current user
scriptResult = executeScript(theScript, model, scriptProcessorName);
}
else
else
{
if (runAsUser != null)
{
@@ -94,25 +87,26 @@ public class ActivitiScriptBase
}
return scriptResult;
}
protected Object executeScriptAsUser(final String theScript, final Map<String, Object> model, final String scriptProcessorName, final String runAsUser)
{
// execute as specified runAsUser
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<>() {
return AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<Object>()
{
public Object doWork() throws Exception
{
return executeScript(theScript, model, scriptProcessorName);
}
}, runAsUser);
}
protected Object executeScript(String theScript, Map<String, Object> model, String scriptProcessorName)
{
// Execute the script using the appropriate processor
Object scriptResult = null;
// Checks if current workflow is secure
boolean secure = isSecure(model);
boolean secure = isSecure();
if (scriptProcessorName != null)
{
@@ -123,11 +117,11 @@ public class ActivitiScriptBase
// Use default script-processor
scriptResult = getServiceRegistry().getScriptService().executeScriptString(theScript, model, secure);
}
return scriptResult;
}
protected String getStringValue(Expression expression, VariableScope scope)
protected String getStringValue(Expression expression, VariableScope scope)
{
if (expression != null)
{
@@ -139,15 +133,15 @@ public class ActivitiScriptBase
protected ServiceRegistry getServiceRegistry()
{
ProcessEngineConfigurationImpl config = Context.getProcessEngineConfiguration();
if (config != null)
if (config != null)
{
// Fetch the registry that is injected in the activiti spring-configuration
ServiceRegistry registry = (ServiceRegistry) config.getBeans().get(ActivitiConstants.SERVICE_REGISTRY_BEAN_KEY);
if (registry == null)
{
throw new RuntimeException(
"Service-registry not present in ProcessEngineConfiguration beans, expected ServiceRegistry with key" +
ActivitiConstants.SERVICE_REGISTRY_BEAN_KEY);
"Service-registry not present in ProcessEngineConfiguration beans, expected ServiceRegistry with key" +
ActivitiConstants.SERVICE_REGISTRY_BEAN_KEY);
}
return registry;
}
@@ -155,136 +149,42 @@ public class ActivitiScriptBase
}
/**
* Checks whether the workflow must be considered secure or not - based on {@link DeploymentEntity} category. If it is not considered secure, the workflow will be executed in sandbox context with more restrictions
* Checks whether the workflow must be considered secure or not - based on {@link DeploymentEntity} category.
* If it is not considered secure, the workflow will be executed in sandbox context with more restrictions
*
* @return true if workflow is considered secure, false otherwise
*/
private boolean isSecure(Map<String, Object> model)
{
String category = getDeploymentCategory(model);
// If the deployment category matches the condition (either internal or full access), the workflow is considered secure
return category != null && (WorkflowDeployer.CATEGORY_ALFRESCO_INTERNAL.equals(category) || WorkflowDeployer.CATEGORY_FULL_ACCESS.equals(category));
}
/**
* Gets the deployment category from the execution context. If no execution context is available, a query to obtain the deployment is performed so the category can be returned.
*
* @param model
* a map with workflow model
* @return the deployment category
*/
private String getDeploymentCategory(Map<String, Object> model)
{
String category = getDeploymentCategoryFromContext();
if (category == null)
{
String deploymentId = null;
String processDefinitionId = null;
if (model != null && model.containsKey(EXECUTION_BINDING_NAME) && model.get(EXECUTION_BINDING_NAME) instanceof ExecutionEntity)
{
ExecutionEntity executionEntity = (ExecutionEntity) model.get(EXECUTION_BINDING_NAME);
deploymentId = executionEntity.getDeploymentId();
processDefinitionId = executionEntity.getProcessDefinitionId();
}
category = getDeploymentCategoryFromQuery(deploymentId, processDefinitionId);
}
return category;
}
/**
* Obtains the deployment category from current execution context
*
* @return the category for current execution deployment, otherwise null
*/
private String getDeploymentCategoryFromContext()
private boolean isSecure()
{
String category = null;
try
{
if (Context.isExecutionContextActive())
{
category = Context.getExecutionContext().getDeployment().getCategory();
}
else
{
if (LOGGER.isDebugEnabled())
{
LOGGER.debug("No execution context available");
}
}
}
catch (Exception e)
{
if (LOGGER.isDebugEnabled())
{
LOGGER.debug("Could not obtain deployment category from execution context: {}", e.getMessage());
}
// No action required
}
return category;
// The workflow is considered secure if the deployment entity category matches the condition (either internal or full access)
return category != null && (WorkflowDeployer.CATEGORY_ALFRESCO_INTERNAL.equals(category) || WorkflowDeployer.CATEGORY_FULL_ACCESS.equals(category));
}
/**
* Obtains the deployment category through a query
*
* @param deploymentId
* the deployment id to obtain the category from
* @param processDefinitionId
* if no deployment id is provided, the process definition id can be used to obtain the deployment
* @return the category for the obtained deployment, otherwise null
* Checks that the specified 'runAs' field
* specifies a valid username.
*/
private String getDeploymentCategoryFromQuery(String deploymentId, String processDefinitionId)
private void validateRunAsUser(final String runAsUser)
{
String category = null;
try
Boolean runAsExists = AuthenticationUtil.runAs(new RunAsWork<Boolean>()
{
RepositoryService repositoryService = Context.getProcessEngineConfiguration().getRepositoryService();
if (deploymentId == null && processDefinitionId != null)
{
ProcessDefinition processDefinition = repositoryService.getProcessDefinition(processDefinitionId);
if (processDefinition != null)
{
deploymentId = processDefinition.getDeploymentId();
}
}
if (deploymentId != null)
{
DeploymentEntity deployment = (DeploymentEntity) repositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult();
if (deployment != null)
{
category = deployment.getCategory();
}
}
}
catch (Exception e)
{
if (LOGGER.isDebugEnabled())
{
LOGGER.debug("Could not obtain deployment category through a query: {}", e.getMessage());
}
}
return category;
}
/**
* Checks that the specified 'runAs' field specifies a valid username.
*/
private void validateRunAsUser(final String runAsUser)
{
Boolean runAsExists = AuthenticationUtil.runAs(new RunAsWork<>() {
// Validate using System user to ensure sufficient permissions available to access person node.
@Override
public Boolean doWork() throws Exception
public Boolean doWork() throws Exception
{
return getServiceRegistry().getPersonService().personExists(runAsUser);
}
@@ -295,21 +195,21 @@ public class ActivitiScriptBase
throw new WorkflowException("runas user '" + runAsUser + "' does not exist.");
}
}
protected ActivitiScriptNode getPersonNode(String runAsUser)
{
String userName = null;
if (runAsUser != null)
if (runAsUser != null)
{
userName = runAsUser;
}
else
else
{
userName = AuthenticationUtil.getFullyAuthenticatedUser();
}
// The "System" user is a special case, which has no person object associated with it.
if (userName != null && !AuthenticationUtil.SYSTEM_USER_NAME.equals(userName))
if(userName != null && !AuthenticationUtil.SYSTEM_USER_NAME.equals(userName))
{
ServiceRegistry services = getServiceRegistry();
PersonService personService = services.getPersonService();
@@ -321,18 +221,18 @@ public class ActivitiScriptBase
}
return null;
}
public void setScript(Expression script)
public void setScript(Expression script)
{
this.script = script;
}
public void setRunAs(Expression runAs)
public void setRunAs(Expression runAs)
{
this.runAs = runAs;
}
public void setScriptProcessor(Expression scriptProcessor)
public void setScriptProcessor(Expression scriptProcessor)
{
this.scriptProcessor = scriptProcessor;
}

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Vlastnost ''{0}'' nebyla nastavena: {1} ({2})
system.err.duplicate_name=Duplicitn\u00ed n\u00e1zvy pod\u0159\u00edzen\u00fdch objekt\u016f nejsou povoleny ({0})
system.err.lucene_not_supported=Subsyst\u00e9m hled\u00e1n\u00ed Lucene nen\u00ed podporov\u00e1n. Viz https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=Subsyst\u00e9m hled\u00e1n\u00ed Lucene nen\u00ed podporov\u00e1n. Viz http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Egenskaben ''{0}'' er ikke blevet indstillet: {1} ({2})
system.err.duplicate_name=Duplikeret navn p\u00e5 underordnet er ikke tilladt: {0}
system.err.lucene_not_supported=Lucene-s\u00f8geundersystemet underst\u00f8ttes ikke. Se https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=Lucene-s\u00f8geundersystemet underst\u00f8ttes ikke. Se http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Ominaisuutta {0} ei ole m\u00e4\u00e4ritetty: {1} ({2})
system.err.duplicate_name=P\u00e4\u00e4llekk\u00e4ist\u00e4 alatasonime\u00e4 ei sallita: {0}
system.err.lucene_not_supported=Lucene-hakualij\u00e4rjestelm\u00e4\u00e4 ei tueta. Saat lis\u00e4tietoja osoitteesta https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=Lucene-hakualij\u00e4rjestelm\u00e4\u00e4 ei tueta. Saat lis\u00e4tietoja osoitteesta http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set : {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed : {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Nie ustawiono w\u0142a\u015bciwo\u015bci ''{0}'': {1} ({2})
system.err.duplicate_name=Zduplikowane nazwy element\u00f3w podrz\u0119dnych s\u0105 niedozwolone: {0}
system.err.lucene_not_supported=Podsystem wyszukiwania Lucene nie jest obs\u0142ugiwany. Zobacz https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=Podsystem wyszukiwania Lucene nie jest obs\u0142ugiwany. Zobacz http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Egenskap ''{0}'' har inte st\u00e4llts in: {1} ({2})
system.err.duplicate_name=Dubbelt underordnat namn inte till\u00e5tet: {0}
system.err.lucene_not_supported=Lucene-s\u00f6kundersystemet st\u00f6ds inte. Se https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=Lucene-s\u00f6kundersystemet st\u00f6ds inte. Se http://docs.alfresco.com/{0}/uppgifter/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -5,7 +5,7 @@
system.err.property_not_set=Property ''{0}'' has not been set: {1} ({2})
system.err.duplicate_name=Duplicate child name not allowed: {0}
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see https://support.hyland.com/p/alfresco
system.err.lucene_not_supported=The lucene search subsystem is not supported. Please see http://docs.alfresco.com/{0}/tasks/lucene-solr4-migration.html
# Bootstrap configuration check messages

View File

@@ -82,7 +82,6 @@
<property name="contentService" ref="contentService" />
<property name="renditionService2" ref="renditionService2" />
<property name="directAccessUrlEnabled" value="${local.transform.directAccessUrl.enabled}"/>
<property name="threadPoolSize" value="${local.transform.threadPoolSize}" />
</bean>
<bean id="synchronousTransformClient" parent="localSynchronousTransformClient" />

View File

@@ -1351,9 +1351,6 @@ restApi.directAccessUrl.defaultExpiryTimeInSec=30
# Controls whether direct access url URLs may be used in transforms.
local.transform.directAccessUrl.enabled=true
# Controls size of thread pool used for transforms.
local.transform.threadPoolSize=8
# Creates additional indexes on alf_node and alf_transaction. Recommended for large repositories.
system.new-node-transaction-indexes.ignored=true

View File

@@ -161,4 +161,5 @@
<ref bean="ServiceRegistry"/>
</property>
</bean>
</beans>

View File

@@ -171,4 +171,5 @@
<value>urldecode</value>
</property>
</bean>
</beans>

View File

@@ -1,354 +1,363 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.action.executer;
import static org.awaitility.Awaitility.await;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.test.context.transaction.TestTransaction;
import org.springframework.transaction.annotation.Transactional;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
/**
* Test of the ActionExecuter for extracting metadata.
*
* @author Jesper Steen Møller
*/
@Category(BaseSpringTestsCategory.class)
@Transactional
public class ContentMetadataExtracterTest extends BaseSpringTest
{
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
protected static final String QUICK_CREATOR = "Nevin Nollop";
private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
private TransactionService transactionService;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;
private ContentMetadataExtracter executer;
private final static String ID = GUID.generate();
@Before
public void before() throws Exception
{
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");
AuthenticationComponent authenticationComponent = (AuthenticationComponent) applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();
// Create the store and get the root node
this.testStoreRef = this.nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
// Create the node used for tests
this.nodeRef = this.nodeService.createNode(
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testnode"),
ContentModel.TYPE_CONTENT).getChildRef();
// Setup the content from the PDF test data
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));
// Get the executer instance
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
}
/**
* Test execution of the extraction itself
*/
@Test
public void testFromBlanks() throws Exception
{
// Test that the action writes properties when they don't exist or are
// unset
// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.remove(ContentModel.PROP_AUTHOR);
props.put(ContentModel.PROP_TITLE, "");
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
// be handled
this.nodeService.setProperties(this.nodeRef, props);
// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();
// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});
// Need to wait for the async extract
await().pollInSameThread()
.atMost(MAX_ASYNC_TIMEOUT)
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);
// Check that the properties have been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
return null;
}
});
}
private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");
private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestUnknownMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
rawMap.put("unknown1", Integer.valueOf(1));
rawMap.put("unknown2", "TWO");
return rawMap;
}
}
@Test
public void testUnknownProperties()
{
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
extracterUnknown.setRegistry(registry);
extracterUnknown.register();
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);
// The unknown properties should be present
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);
assertNotNull("Unknown property is null", prop1);
assertNotNull("Unknown property is null", prop2);
}
private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestNullPropMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
putRawValue("title", null, rawMap);
putRawValue("description", "", rawMap);
return rawMap;
}
}
/**
* Ensure that missing raw values result in node properties being removed when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)} set to <tt>false</tt>.
*/
@Test
public void testNullExtractedValues_ALF1823()
{
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
extractor.setRegistry(registry);
extractor.register();
// Now set the title and description
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);
// cm:titled properties should be present
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
assertNotNull("cm:title property is null", title);
assertNotNull("cm:description property is null", descr);
try
{
// Now change the setting to remove unset aspect properties
executer.setCarryAspectProperties(false);
// Extract again
executer.execute(action, this.nodeRef);
// cm:titled properties should *NOT* be present
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
assertNull("cm:title property is not null", title);
assertNull("cm:description property is not null", descr);
}
finally
{
executer.setCarryAspectProperties(true);
}
}
/**
* Test execution of the pragmatic approach
*/
@Test
public void testFromPartial() throws Exception
{
// Test that the action does not overwrite properties that are already
// set
String myCreator = "Null-op";
String myTitle = "The hot dog is eaten by the city fox";
// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.put(ContentModel.PROP_AUTHOR, myCreator);
props.put(ContentModel.PROP_TITLE, myTitle);
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
this.nodeService.setProperties(this.nodeRef, props);
// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();
// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});
// Need to wait for the async extract
await().pollInSameThread()
.atMost(MAX_ASYNC_TIMEOUT)
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);
// Check that the properties have been preserved, but that description has been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
return null;
}
});
}
}
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
 * Copyright (C) 2005 Jesper Steen Møller
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.action.executer;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.test.context.transaction.TestTransaction;
import org.springframework.transaction.annotation.Transactional;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* Test of the ActionExecuter for extracting metadata.
*
* @author Jesper Steen Møller
*/
@Category(BaseSpringTestsCategory.class)
@Transactional
public class ContentMetadataExtracterTest extends BaseSpringTest
{
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
protected static final String QUICK_CREATOR = "Nevin Nollop";
private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
private TransactionService transactionService;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;
private ContentMetadataExtracter executer;
private final static String ID = GUID.generate();
@Before
public void before() throws Exception
{
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");
AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();
// Create the store and get the root node
this.testStoreRef = this.nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
// Create the node used for tests
this.nodeRef = this.nodeService.createNode(
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testnode"),
ContentModel.TYPE_CONTENT).getChildRef();
// Setup the content from the PDF test data
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));
// Get the executer instance
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
}
/**
* Test execution of the extraction itself
*/
@Test
public void testFromBlanks() throws Exception
{
// Test that the action writes properties when they don't exist or are
// unset
// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.remove(ContentModel.PROP_AUTHOR);
props.put(ContentModel.PROP_TITLE, "");
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
// be handled
this.nodeService.setProperties(this.nodeRef, props);
// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();
// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});
Thread.sleep(3000); // Need to wait for the async extract
// Check that the properties have been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
return null;
}
});
}
private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");
private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestUnknownMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
rawMap.put("unknown1", Integer.valueOf(1));
rawMap.put("unknown2", "TWO");
return rawMap;
}
}
@Test
public void testUnknownProperties()
{
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
extracterUnknown.setRegistry(registry);
extracterUnknown.register();
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);
// The unknown properties should be present
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);
assertNotNull("Unknown property is null", prop1);
assertNotNull("Unknown property is null", prop2);
}
private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestNullPropMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
putRawValue("title", null, rawMap);
putRawValue("description", "", rawMap);
return rawMap;
}
}
/**
* Ensure that missing raw values result in node properties being removed
* when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)}
* set to <tt>false</tt>.
*/
@Test
public void testNullExtractedValues_ALF1823()
{
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
extractor.setRegistry(registry);
extractor.register();
// Now set the title and description
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);
// cm:titled properties should be present
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
assertNotNull("cm:title property is null", title);
assertNotNull("cm:description property is null", descr);
try
{
// Now change the setting to remove unset aspect properties
executer.setCarryAspectProperties(false);
// Extract again
executer.execute(action, this.nodeRef);
// cm:titled properties should *NOT* be present
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
assertNull("cm:title property is not null", title);
assertNull("cm:description property is not null", descr);
}
finally
{
executer.setCarryAspectProperties(true);
}
}
/**
* Test execution of the pragmatic approach
*/
@Test
public void testFromPartial() throws Exception
{
// Test that the action does not overwrite properties that are already
// set
String myCreator = "Null-op";
String myTitle = "The hot dog is eaten by the city fox";
// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.put(ContentModel.PROP_AUTHOR, myCreator);
props.put(ContentModel.PROP_TITLE, myTitle);
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
this.nodeService.setProperties(this.nodeRef, props);
// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();
// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});
Thread.sleep(3000); // Need to wait for the async extract
// Check that the properties have been preserved, but that description has been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
return null;
}
});
}
}
View File
@@ -1,475 +1,478 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.content.caching.cleanup;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.time.Duration;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;
import org.alfresco.repo.content.caching.CacheFileProps;
import org.alfresco.repo.content.caching.CachingContentStore;
import org.alfresco.repo.content.caching.ContentCacheImpl;
import org.alfresco.repo.content.caching.Key;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.testing.category.LuceneTests;
/**
* Tests for the CachedContentCleanupJob
*
* @author Matt Ward
*/
@Category(LuceneTests.class)
public class CachedContentCleanupJobTest
{
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(10);
private enum UrlSource
{
PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT
}
;
private static ApplicationContext ctx;
private CachingContentStore cachingStore;
private ContentCacheImpl cache;
private File cacheRoot;
private CachedContentCleaner cleaner;
@BeforeClass
public static void beforeClass()
{
String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
ctx = ApplicationContextHelper.getApplicationContext(new String[]{cleanerConf});
}
@Before
public void setUp() throws IOException
{
cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
cache = (ContentCacheImpl) ctx.getBean("contentCache");
cacheRoot = cache.getCacheRoot();
cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
cleaner.setMinFileAgeMillis(0);
cleaner.setMaxDeleteWatchCount(0);
// Clear the cache from disk and memory
cache.removeAll();
FileUtils.cleanDirectory(cacheRoot);
}
@Test
public void filesNotInCacheAreDeleted() throws InterruptedException
{
cleaner.setMaxDeleteWatchCount(0);
int numFiles = 300; // Must be a multiple of number of UrlSource types being tested
long totalSize = 0; // running total of the sample file sizes
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
// Testing with a number of files. The cached file cleaner will be able to determine the 'original'
// content URL for each file by either retrieving from the companion properties file, or performing
// a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
// URL determinable for the file.
UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
File cacheFile = createCacheFile(urlSource, false);
files[i] = cacheFile;
totalSize += cacheFile.length();
}
// Run cleaner
cleaner.execute();
await().pollDelay(Duration.ofMillis(100))
.atMost(MAX_WAIT_TIMEOUT)
.until(() -> !cleaner.isRunning());
// check all files deleted
for (File file : files)
{
assertFalse("File should have been deleted: " + file, file.exists());
}
assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
}
@Test
public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
{
final long minFileAge = 5000;
cleaner.setMinFileAgeMillis(minFileAge);
cleaner.setMaxDeleteWatchCount(0);
int numFiles = 10;
File[] oldFiles = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
}
// Sleep to make sure 'old' files really are older than minFileAgeMillis
Thread.sleep(minFileAge);
File[] newFiles = new File[numFiles];
long newFilesTotalSize = 0;
for (int i = 0; i < numFiles; i++)
{
newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
newFilesTotalSize += newFiles[i].length();
}
// The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
// old the test will fail and it will be necessary to rethink how to test this.
cleaner.execute();
await().pollDelay(Duration.ofMillis(100))
.atMost(MAX_WAIT_TIMEOUT)
.until(() -> !cleaner.isRunning());
if (cleaner.getDurationMillis() > minFileAge)
{
fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
" which is longer than minFileAge [" + minFileAge + "ms]");
}
// check all 'old' files deleted
for (File file : oldFiles)
{
assertFalse("File should have been deleted: " + file, file.exists());
}
// check all 'new' files still present
for (File file : newFiles)
{
assertTrue("File should not have been deleted: " + file, file.exists());
}
assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
}
@Test
public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
{
int numFiles = 30;
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
// Make sure it's in the cache - all the files will be in the cache, so the
// cleaner won't clean any up once it has finished aggressively reclaiming space.
files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
}
// How much space to reclaim - seven files worth (all files are same size)
long fileSize = files[0].length();
long sevenFilesSize = 7 * fileSize;
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
// It will delete the older files aggressively (i.e. the ones prior to the two second sleep) and
// then will examine the new files for potential deletion.
// Since some of the newer files are not in the cache, it will delete those.
cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
int numDeleted = 0;
for (File f : files)
{
if (!f.exists())
{
numDeleted++;
}
}
// How many were definitely deleted?
assertEquals("Wrong number of files deleted", 7, numDeleted);
// The cleaner should have recorded the correct number of deletions
assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
}
@Test
public void standardCleanAfterAggressiveFinished() throws InterruptedException
{
// Don't use numFiles > 59! as we're using this for the minute element in the cache file path.
final int numFiles = 30;
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);
if (i >= 21 && i <= 24)
{
// 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
// to normal cleaning), because they are not in the cache.
files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
}
else
{
// All other files will be in the cache
files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
}
}
// How much space to reclaim - seven files worth (all files are same size)
long fileSize = files[0].length();
long sevenFilesSize = 7 * fileSize;
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
// It will delete the older files aggressively (i.e. even if they are actively in the cache) and
// then will examine the new files for potential deletion.
// Since some of the newer files are not in the cache, it will delete those too.
cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
for (int i = 0; i < numFiles; i++)
{
if (i < 7)
{
assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
}
if (i >= 21 && i <= 24)
{
assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
}
}
assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", (11 * fileSize), cleaner.getSizeFilesDeleted());
}
@Test
public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
{
cleaner.setMaxDeleteWatchCount(0);
File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
file.getParentFile().mkdirs();
PrintWriter writer = new PrintWriter(file);
writer.println("Content for emptyParentDirectoriesAreDeleted");
writer.close();
assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());
cleaner.handle(file);
assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
}
@Test
public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
{
// A non-advisable setting but useful for testing, maxDeleteWatchCount of zero
// which should result in immediate deletion upon discovery of content no longer in the cache.
cleaner.setMaxDeleteWatchCount(0);
File file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkFilesDeleted(file);
// Anticipated to be the most common setting: maxDeleteWatchCount of 1.
cleaner.setMaxDeleteWatchCount(1);
file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 1);
cleaner.handle(file);
checkFilesDeleted(file);
// Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
cleaner.setMaxDeleteWatchCount(3);
file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 1);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 2);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 3);
cleaner.handle(file);
checkFilesDeleted(file);
}
private void checkFilesDeleted(File file)
{
assertFalse("File should have been deleted: " + file, file.exists());
CacheFileProps props = new CacheFileProps(file);
assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
}
private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
{
assertTrue("File should still exist: " + file, file.exists());
CacheFileProps props = new CacheFileProps(file);
props.load();
assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
}
@Test
public void filesInCacheAreNotDeleted() throws InterruptedException
{
cleaner.setMaxDeleteWatchCount(0);
// The SlowContentStore will always give out content when asked,
// so asking for any content will cause something to be cached.
String url = makeContentUrl();
int numFiles = 50;
for (int i = 0; i < numFiles; i++)
{
ContentReader reader = cachingStore.getReader(url);
reader.getContentString();
}
cleaner.execute();
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
for (int i = 0; i < numFiles; i++)
{
File cacheFile = new File(cache.getCacheFilePath(url));
assertTrue("File should exist", cacheFile.exists());
}
}
private File createCacheFile(UrlSource urlSource, boolean putInCache)
{
Calendar calendar = new GregorianCalendar();
return createCacheFile(calendar, urlSource, putInCache);
}
private File createCacheFile(Calendar calendar, /* int year, int month, int day, int hour, int minute, */
UrlSource urlSource, boolean putInCache)
{
File file = new File(cacheRoot, createNewCacheFilePath(calendar));
file.getParentFile().mkdirs();
writeSampleContent(file);
String contentUrl = makeContentUrl();
if (putInCache)
{
cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
}
switch (urlSource)
{
case NOT_PRESENT:
// cache won't be able to determine original content URL for the file
break;
case PROPS_FILE:
// file with content URL in properties file
CacheFileProps props = new CacheFileProps(file);
props.setContentUrl(contentUrl);
props.store();
break;
case REVERSE_CACHE_LOOKUP:
// file with content URL in reverse lookup cache - but not 'in the cache' (forward lookup).
cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
}
assertTrue("File should exist", file.exists());
return file;
}
/**
 * Mimic functionality of ContentCacheImpl.createNewCacheFilePath() but allowing a specific date (rather than 'now') to be used.
*
* @param calendar
* Calendar
* @return Path to use for cache file.
*/
private String createNewCacheFilePath(Calendar calendar)
{
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH) + 1; // 0-based
int day = calendar.get(Calendar.DAY_OF_MONTH);
int hour = calendar.get(Calendar.HOUR_OF_DAY);
int minute = calendar.get(Calendar.MINUTE);
// create the URL
StringBuilder sb = new StringBuilder(20);
sb.append(year).append('/')
.append(month).append('/')
.append(day).append('/')
.append(hour).append('/')
.append(minute).append('/')
.append(GUID.generate()).append(".bin");
return sb.toString();
}
private String makeContentUrl()
{
return "protocol://some/made/up/url/" + GUID.generate();
}
private void writeSampleContent(File file)
{
try
{
PrintWriter writer = new PrintWriter(file);
writer.println("Content for sample file in " + getClass().getName());
writer.close();
}
catch (Throwable e)
{
throw new RuntimeException("Couldn't write file: " + file, e);
}
}
}
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.content.caching.cleanup;
import static org.junit.Assert.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.alfresco.repo.content.caching.CacheFileProps;
import org.alfresco.repo.content.caching.CachingContentStore;
import org.alfresco.repo.content.caching.ContentCacheImpl;
import org.alfresco.repo.content.caching.Key;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.testing.category.LuceneTests;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;
/**
* Tests for the CachedContentCleanupJob
*
* @author Matt Ward
*/
@Category(LuceneTests.class)
public class CachedContentCleanupJobTest
{
private enum UrlSource { PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT };
private static ApplicationContext ctx;
private CachingContentStore cachingStore;
private ContentCacheImpl cache;
private File cacheRoot;
private CachedContentCleaner cleaner;
@BeforeClass
public static void beforeClass()
{
String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
ctx = ApplicationContextHelper.getApplicationContext(new String[] { cleanerConf });
}
@Before
public void setUp() throws IOException
{
cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
cache = (ContentCacheImpl) ctx.getBean("contentCache");
cacheRoot = cache.getCacheRoot();
cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
cleaner.setMinFileAgeMillis(0);
cleaner.setMaxDeleteWatchCount(0);
// Clear the cache from disk and memory
cache.removeAll();
FileUtils.cleanDirectory(cacheRoot);
}
@Test
public void filesNotInCacheAreDeleted() throws InterruptedException
{
cleaner.setMaxDeleteWatchCount(0);
int numFiles = 300; // Must be a multiple of number of UrlSource types being tested
long totalSize = 0; // running total of the sample file sizes
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
// Testing with a number of files. The cached file cleaner will be able to determine the 'original'
// content URL for each file by either retrieving from the companion properties file, or performing
// a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
// URL determinable for the file.
UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
File cacheFile = createCacheFile(urlSource, false);
files[i] = cacheFile;
totalSize += cacheFile.length();
}
// Run cleaner
cleaner.execute();
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
// check all files deleted
for (File file : files)
{
assertFalse("File should have been deleted: " + file, file.exists());
}
assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
}
@Test
public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
{
final long minFileAge = 5000;
cleaner.setMinFileAgeMillis(minFileAge);
cleaner.setMaxDeleteWatchCount(0);
int numFiles = 10;
File[] oldFiles = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
}
// Sleep to make sure 'old' files really are older than minFileAgeMillis
Thread.sleep(minFileAge);
File[] newFiles = new File[numFiles];
long newFilesTotalSize = 0;
for (int i = 0; i < numFiles; i++)
{
newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
newFilesTotalSize += newFiles[i].length();
}
// The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
// old the test will fail and it will be necessary to rethink how to test this.
cleaner.execute();
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
if (cleaner.getDurationMillis() > minFileAge)
{
fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
" which is longer than minFileAge [" + minFileAge + "ms]");
}
// check all 'old' files deleted
for (File file : oldFiles)
{
assertFalse("File should have been deleted: " + file, file.exists());
}
// check all 'new' files still present
for (File file : newFiles)
{
assertTrue("File should not have been deleted: " + file, file.exists());
}
assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
}
@Test
public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
{
int numFiles = 30;
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
// Make sure it's in the cache - all the files will be in the cache, so the
// cleaner won't clean any up once it has finished aggressively reclaiming space.
files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
}
// How much space to reclaim - seven files worth (all files are same size)
long fileSize = files[0].length();
long sevenFilesSize = 7 * fileSize;
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
// It will delete the older files aggressively (i.e. the ones prior to the two second sleep) and
// then will examine the new files for potential deletion.
// Since some of the newer files are not in the cache, it will delete those.
cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
int numDeleted = 0;
for (File f : files)
{
if (!f.exists())
{
numDeleted++;
}
}
// How many were definitely deleted?
assertEquals("Wrong number of files deleted", 7 , numDeleted);
// The cleaner should have recorded the correct number of deletions
assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
}
@Test
public void standardCleanAfterAggressiveFinished() throws InterruptedException
{
// Don't use numFiles > 59! as we're using this for the minute element in the cache file path.
final int numFiles = 30;
File[] files = new File[numFiles];
for (int i = 0; i < numFiles; i++)
{
Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);
if (i >= 21 && i <= 24)
{
// 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
// to normal cleaning), because they are not in the cache.
files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
}
else
{
// All other files will be in the cache
files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
}
}
// How much space to reclaim - seven files worth (all files are same size)
long fileSize = files[0].length();
long sevenFilesSize = 7 * fileSize;
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
// It will delete the older files aggressively (i.e. even if they are actively in the cache) and
// then will examine the new files for potential deletion.
// Since some of the newer files are not in the cache, it will delete those too.
cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
for (int i = 0; i < numFiles; i++)
{
if (i < 7)
{
assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
}
if (i >= 21 && i <= 24)
{
assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
}
}
assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
assertEquals("Incorrect total size of files deleted", (11*fileSize), cleaner.getSizeFilesDeleted());
}
@Test
public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
{
cleaner.setMaxDeleteWatchCount(0);
File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
file.getParentFile().mkdirs();
PrintWriter writer = new PrintWriter(file);
writer.println("Content for emptyParentDirectoriesAreDeleted");
writer.close();
assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());
cleaner.handle(file);
assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
}
@Test
public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
{
// A non-advisable setting but useful for testing, maxDeleteWatchCount of zero
// which should result in immediate deletion upon discovery of content no longer in the cache.
cleaner.setMaxDeleteWatchCount(0);
File file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkFilesDeleted(file);
// Anticipated to be the most common setting: maxDeleteWatchCount of 1.
cleaner.setMaxDeleteWatchCount(1);
file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 1);
cleaner.handle(file);
checkFilesDeleted(file);
// Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
cleaner.setMaxDeleteWatchCount(3);
file = createCacheFile(UrlSource.NOT_PRESENT, false);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 1);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 2);
cleaner.handle(file);
checkWatchCountForCacheFile(file, 3);
cleaner.handle(file);
checkFilesDeleted(file);
}
private void checkFilesDeleted(File file)
{
assertFalse("File should have been deleted: " + file, file.exists());
CacheFileProps props = new CacheFileProps(file);
assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
}
private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
{
assertTrue("File should still exist: " + file, file.exists());
CacheFileProps props = new CacheFileProps(file);
props.load();
assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
}
@Test
public void filesInCacheAreNotDeleted() throws InterruptedException
{
cleaner.setMaxDeleteWatchCount(0);
// The SlowContentStore will always give out content when asked,
// so asking for any content will cause something to be cached.
String url = makeContentUrl();
int numFiles = 50;
for (int i = 0; i < numFiles; i++)
{
ContentReader reader = cachingStore.getReader(url);
reader.getContentString();
}
cleaner.execute();
Thread.sleep(400);
while (cleaner.isRunning())
{
Thread.sleep(200);
}
for (int i = 0; i < numFiles; i++)
{
File cacheFile = new File(cache.getCacheFilePath(url));
assertTrue("File should exist", cacheFile.exists());
}
}
private File createCacheFile(UrlSource urlSource, boolean putInCache)
{
Calendar calendar = new GregorianCalendar();
return createCacheFile(calendar, urlSource, putInCache);
}
private File createCacheFile(Calendar calendar, /*int year, int month, int day, int hour, int minute,*/
UrlSource urlSource, boolean putInCache)
{
File file = new File(cacheRoot, createNewCacheFilePath(calendar));
file.getParentFile().mkdirs();
writeSampleContent(file);
String contentUrl = makeContentUrl();
if (putInCache)
{
cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
}
switch(urlSource)
{
case NOT_PRESENT:
// cache won't be able to determine original content URL for the file
break;
case PROPS_FILE:
// file with content URL in properties file
CacheFileProps props = new CacheFileProps(file);
props.setContentUrl(contentUrl);
props.store();
break;
case REVERSE_CACHE_LOOKUP:
// file with content URL in reverse lookup cache - but not 'in the cache' (forward lookup).
cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
}
assertTrue("File should exist", file.exists());
return file;
}
/**
 * Mimic functionality of ContentCacheImpl.createNewCacheFilePath()
* but allowing a specific date (rather than 'now') to be used.
*
* @param calendar Calendar
* @return Path to use for cache file.
*/
private String createNewCacheFilePath(Calendar calendar)
{
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH) + 1; // 0-based
int day = calendar.get(Calendar.DAY_OF_MONTH);
int hour = calendar.get(Calendar.HOUR_OF_DAY);
int minute = calendar.get(Calendar.MINUTE);
// create the URL
StringBuilder sb = new StringBuilder(20);
sb.append(year).append('/')
.append(month).append('/')
.append(day).append('/')
.append(hour).append('/')
.append(minute).append('/')
.append(GUID.generate()).append(".bin");
return sb.toString();
}
private String makeContentUrl()
{
return "protocol://some/made/up/url/" + GUID.generate();
}
private void writeSampleContent(File file)
{
try
{
PrintWriter writer = new PrintWriter(file);
writer.println("Content for sample file in " + getClass().getName());
writer.close();
}
catch (Throwable e)
{
throw new RuntimeException("Couldn't write file: " + file, e);
}
}
}
Some files were not shown because too many files have changed in this diff.