Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-09-10 14:11:58 +00:00)

Compare commits (120 commits)
Commit SHA1s:
26f0681114, 9d3b968197, cf447897c5, cf905313e3, aa611b2885, 473237460f,
c2775e3d3d, 9681d99bf4, 616604a4ef, e2596492d5, 659bcf5393, 872f7c80e9,
b8ef25b059, abdeddcabf, 4aa2f30cf6, 5984d62107, 0035c997ca, b9686dc9b2,
3e16565272, 154efed2eb, 956ce6c1a2, c68be19071, bcda52f5e2, d92603ef2b,
9f1abd5db0, c4f567e7ba, dd03941907, b5ee3c06a9, 5b6d080e8a, e03c8a72d1,
70e7f68a4b, 19053341b8, 24c41b97cc, 97de175cde, 6a719bb5f6, b8a3724ee7,
0077d31bed, 342bd5a103, d848e12981, 9cb2b23ef5, 7db90ee90c, 9d0106e000,
26c991a563, ddfabba4ba, ab7d757412, fe028f5b85, 2baf1b9c91, 3818f94268,
53b41068d4, c302bc31ff, 9a30044064, 41edced9f1, 15ca9e21be, 36bf6d2f81,
cf01f167ae, 4c94059bbf, 5efe11008d, 3225eefd0b, b6de89aa8d, 4cc1c10ce5,
7641c128c5, a62ad8715e, 542f189907, a006b5acaf, c2e516b69a, 2c5044896b,
24454afe6b, aec55ed8a6, ddd5a4ae48, e523245a10, c4217b32fb, 2fbd21076f,
cb1419b140, 9a6c6f2ee9, eaff930456, 4a03e8cc98, 0eaeea35f8, f4c632c26b,
3c96ed9482, 0141284b37, 2c8ed7f4b5, decbe6b285, f0f538bad0, c0aaf75284,
7f5889474e, 11c6125760, ba4effc6ec, 3f52aec2dc, eb3df043be, c5aed167f4,
a477c19e9a, 1497362d3e, 27e2775e40, f2ecce0f46, 0ad54cbf77, 3e3cd479c2,
b9b41a10e8, 664d0b9704, 1493b02d8d, 70c1a1279c, f0638e8d7d, 983dd47c35,
24d092cb02, ddb299ab03, 19d214fcb0, 870ff8cc64, aed08fe5d9, a3b0541560,
2f6c5614c3, 82d3828351, c986498481, c93d81379e, d348e0b72d, dc5e7405cc,
3c8bb7f154, bb8d42d23c, 9c1aa53819, 885f4a49a5, 9989ec3260, 78ad14b696
@@ -25,10 +25,6 @@
         <url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
       </pluginRepository>
     </pluginRepositories>
-    <properties>
-      <!-- WhiteSource token -->
-      <org.whitesource.orgToken>${env.WHITESOURCE_API_KEY}</org.whitesource.orgToken>
-    </properties>
   </profile>
 </profiles>
 
.github/workflows/ci.yml (new file, vendored, 411 added lines)
@@ -0,0 +1,411 @@
name: Alfresco Community Repo CI

on:
  pull_request:
    branches:
      - master
      - release/**
      - feature/**
      - fix/**

  push:
    branches:
      - feature/**
      - fix/**
  workflow_call:
  workflow_dispatch:

env:
  TAS_SCRIPTS: ../alfresco-community-repo/packaging/tests/scripts
  TAS_ENVIRONMENT: ./packaging/tests/environment
  DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
  DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
  GITHUB_ACTIONS_DEPLOY_TIMEOUT: 60
  MAVEN_PASSWORD: ${{ secrets.NEXUS_PASSWORD }}
  MAVEN_USERNAME: ${{ secrets.NEXUS_USERNAME }}
  QUAY_PASSWORD: ${{ secrets.QUAY_PASSWORD }}
  QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}

jobs:
  prepare:
    name: "Prepare"
    runs-on: ubuntu-latest
    if: >
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Prepare maven cache and check compilation"
        run: bash ./scripts/ci/prepare.sh
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  all_unit_tests_suite:
    name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      !contains(github.event.head_commit.message, '[skip repo]') &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Run tests"
        run: |
          mvn -B test -pl core,data-model -am -DfailIfNoTests=false
          mvn -B test -pl "repository" -am "-Dtest=AllUnitTestsSuite" -DfailIfNoTests=false
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_app_context_test_suites:
    name: Repository - ${{ matrix.testSuite }}
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      !contains(github.event.head_commit.message, '[skip repo]') &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    strategy:
      fail-fast: false
      matrix:
        include:
          - testSuite: AppContext01TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContext02TestSuite
            compose-profile: default
          - testSuite: AppContext03TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContext04TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContext05TestSuite
            compose-profile: default
            mvn-options: '"-Didentity-service.auth-server-url=http://${HOST_IP}:8999/auth"'
          - testSuite: AppContext06TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContextExtraTestSuite
            compose-profile: with-transform-core-aio
          - testSuite: MiscContextTestSuite
            compose-profile: with-transform-core-aio
          - testSuite: SearchTestSuite
            compose-profile: default
            mvn-options: '-Dindex.subsystem.name=solr6'
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Set transformers tag"
        run: echo "TRANSFORMERS_TAG=$(mvn help:evaluate -Dexpression=dependency.alfresco-transform-core.version -q -DforceStdout)" >> $GITHUB_ENV
      - name: "Set up the environment"
        run: |
          if [ -e ./scripts/ci/tests/${{ matrix.testSuite }}-setup.sh ]; then
            bash ./scripts/ci/tests/${{ matrix.testSuite }}-setup.sh
          fi
          docker-compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=${{ matrix.testSuite }} -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco ${{ matrix.mvn-options }}
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_mariadb_tests:
    name: Repository - MariaDB ${{ matrix.version }} tests
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) &&
      github.event_name != 'pull_request' &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    strategy:
      fail-fast: false
      matrix:
        version: ['10.2.18', '10.4']
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: Run MariaDB ${{ matrix.version }} database
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile mariadb up -d
        env:
          MARIADB_VERSION: ${{ matrix.version }}
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_mariadb_10_5_tests:
    name: Repository - MariaDB 10.5 tests
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request') &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[latest db]') ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: Run MariaDB 10.5 database
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile mariadb up -d
        env:
          MARIADB_VERSION: 10.5
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_mysql_5_7_28_tests:
    name: Repository - MySQL ${{ matrix.version }} tests
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) &&
      github.event_name != 'pull_request' &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Run MySQL 5.7.28 database"
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile mysql up -d
        env:
          MYSQL_VERSION: 5.7.28
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_mysql_8_tests:
    name: Repository - MySQL 8 tests
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request') &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[latest db]') ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Run MySQL 8 database"
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile mysql up -d
        env:
          MYSQL_VERSION: 8
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_postgresql_tests:
    name: "Repository - PostgreSQL ${{ matrix.version }} tests"
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/')) &&
      github.event_name != 'pull_request' &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    strategy:
      fail-fast: false
      matrix:
        version: ['10.9', '11.7', '12.4']
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Run PostgreSQL ${{ matrix.version }} database"
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
        env:
          POSTGRES_VERSION: ${{ matrix.version }}
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  repository_postgresql_13_1_tests:
    name: "Repository - PostgreSQL 13.1 tests"
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request') &&
      !contains(github.event.head_commit.message, '[skip db]')) ||
      contains(github.event.head_commit.message, '[latest db]') ||
      contains(github.event.head_commit.message, '[db]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - name: "Run PostgreSQL 13.1 database"
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose-db.yaml --profile postgres up -d
        env:
          POSTGRES_VERSION: 13.1
      - name: "Run tests"
        run: mvn -B test -pl repository -am -Dtest=AllDBTestsTestSuite -DfailIfNoTests=false -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  remote_api_app_context_test_suites:
    name: Remote-api - ${{ matrix.testSuite }}
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      !contains(github.event.head_commit.message, '[skip repo]') &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    strategy:
      fail-fast: false
      matrix:
        include:
          - testSuite: AppContext01TestSuite
            compose-profile: default
          - testSuite: AppContext02TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContext03TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContext04TestSuite
            compose-profile: with-transform-core-aio
          - testSuite: AppContextExtraTestSuite
            compose-profile: default
    env:
      REQUIRES_INSTALLED_ARTIFACTS: true
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Build"
        timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
        run: |
          bash ./scripts/ci/init.sh
          bash ./scripts/ci/build.sh
      - name: "Set transformers tag"
        run: echo "TRANSFORMERS_TAG=$(mvn help:evaluate -Dexpression=dependency.alfresco-transform-core.version -q -DforceStdout)" >> $GITHUB_ENV
      - name: "Set up the environment"
        run: docker-compose -f ./scripts/ci/docker-compose/docker-compose.yaml --profile ${{ matrix.compose-profile }} up -d
      - name: "Run tests"
        run: mvn -B test -pl remote-api -Dtest=${{ matrix.testSuite }} -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh

  tas_tests:
    name: ${{ matrix.test-name }}
    runs-on: ubuntu-latest
    needs: [prepare]
    if: >
      (((github.ref_name == 'master' || startsWith(github.ref_name, 'release/') || github.event_name == 'pull_request' ) &&
      !contains(github.event.head_commit.message, '[skip tas]')) ||
      contains(github.event.head_commit.message, '[tas]')) &&
      !contains(github.event.head_commit.message, '[skip tests]') &&
      !contains(github.event.head_commit.message, '[force]')
    strategy:
      fail-fast: false
      matrix:
        include:
          - test-name: "REST API TAS tests part1"
            pom-dir: tas-restapi
            test-profile: run-restapi-part1
          - test-name: "REST API TAS tests part2"
            pom-dir: tas-restapi
            test-profile: run-restapi-part2
          - test-name: "REST API TAS tests part3"
            pom-dir: tas-restapi
            test-profile: run-restapi-part3
          - test-name: "CMIS TAS tests - BROWSER binding"
            pom-dir: tas-cmis
            test-profile: run-cmis-browser
          - test-name: "CMIS TAS tests - ATOM binding"
            pom-dir: tas-cmis
            test-profile: run-cmis-atom
          - test-name: "CMIS TAS tests - WEBSERVICES binding"
            pom-dir: tas-cmis
            test-profile: run-cmis-webservices
          - test-name: "Email TAS tests"
            pom-dir: tas-email
          - test-name: "WebDAV TAS tests"
            pom-dir: tas-webdav
          - test-name: "Integration TAS tests"
            pom-dir: tas-integration
    env:
      REQUIRES_LOCAL_IMAGES: true
    steps:
      - uses: actions/checkout@v3
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
        with:
          java-version: "11"
      - name: "Build"
        timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
        run: |
          bash ./scripts/ci/init.sh
          bash ./scripts/ci/build.sh
      - name: "Set up the environment"
        run: |
          ${{ env.TAS_SCRIPTS }}/start-compose.sh ${{ env.TAS_ENVIRONMENT }}/docker-compose-minimal+transforms.yml
          ${{ env.TAS_SCRIPTS }}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      - name: "Run tests"
        id: tests
        timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
        run: mvn -B verify -f packaging/tests/${{ matrix.pom-dir }}/pom.xml -Pall-tas-tests,${{ matrix.test-profile }} -Denvironment=default -DrunBugs=false
      - name: "Print output after success"
        if: ${{ always() && steps.tests.outcome == 'success' }}
        run: ${TAS_SCRIPTS}/output_tests_run.sh "packaging/tests/${{ matrix.pom-dir }}"
      - name: "Print output after failure"
        if: ${{ always() && steps.tests.outcome == 'failure' }}
        run: ${TAS_SCRIPTS}/output_logs_for_failures.sh "packaging/tests/${{ matrix.pom-dir }}"
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh
.github/workflows/master_release.yml (new file, vendored, 79 added lines)
@@ -0,0 +1,79 @@
name: Master/Release branch workflow

on:
  push:
    branches:
      - master
      - release/**

env:
  GIT_USERNAME: ${{ secrets.BOT_GITHUB_USERNAME }}
  GIT_EMAIL: ${{ secrets.BOT_GITHUB_EMAIL }}
  GIT_PASSWORD: ${{ secrets.BOT_GITHUB_TOKEN }}
  DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
  DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
  MAVEN_PASSWORD: ${{ secrets.NEXUS_PASSWORD }}
  MAVEN_USERNAME: ${{ secrets.NEXUS_USERNAME }}
  QUAY_PASSWORD: ${{ secrets.QUAY_PASSWORD }}
  QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
  GITHUB_ACTIONS_DEPLOY_TIMEOUT: 60

jobs:
  run_ci:
    uses: ./.github/workflows/ci.yml
    secrets: inherit
  push_to_nexus:
    name: "Push to Nexus"
    runs-on: ubuntu-latest
    needs: [run_ci]
    if: >
      !(failure() || cancelled()) &&
      !contains(github.event.head_commit.message, '[no release]') &&
      github.event_name != 'pull_request'
    steps:
      - uses: actions/checkout@v3
        with:
          persist-credentials: false
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
        with:
          username: ${{ env.GIT_USERNAME }}
          email: ${{ env.GIT_EMAIL }}
          global: true
      - name: "Release"
        timeout-minutes: ${{ fromJSON(env.GITHUB_ACTIONS_DEPLOY_TIMEOUT) }}
        run: |
          bash scripts/ci/verify_release_tag.sh
          bash scripts/ci/maven_release.sh
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh
  update_downstream:
    name: "Update alfresco-enterprise-repo"
    runs-on: ubuntu-latest
    needs: [push_to_nexus]
    if: >
      !(failure() || cancelled()) &&
      !contains(github.event.head_commit.message, '[no downstream]') &&
      github.event_name != 'pull_request'
    steps:
      - uses: actions/checkout@v3
        with:
          persist-credentials: false
      - uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v1.33.0
      - uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v1.33.0
      - name: "Init"
        run: bash ./scripts/ci/init.sh
      - uses: Alfresco/alfresco-build-tools/.github/actions/configure-git-author@v1.33.0
        with:
          username: ${{ env.GIT_USERNAME }}
          email: ${{ env.GIT_EMAIL }}
          global: true
      - name: "Update downstream"
        run: bash ./scripts/ci/update_downstream.sh
        env:
          COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
      - name: "Clean Maven cache"
        run: bash ./scripts/ci/cleanup_cache.sh
.travis.yml (deleted file, 300 removed lines)
@@ -1,300 +0,0 @@
---
dist: focal
language: java
jdk: openjdk11

services:
  - docker

git:
  depth: false
  quiet: true

cache:
  directories:
    - ${HOME}/.m2/repository

# the cache can grow constantly
before_cache: bash scripts/travis/cleanup_cache.sh

branches:
  only:
    - master
    - /release\/.*/
    - /feature\/.*/
    - /fix\/.*/

env:
  global:
    - TAS_SCRIPTS=../alfresco-community-repo/packaging/tests/scripts
    - TAS_ENVIRONMENT=./packaging/tests/environment

stages:
  - name: test
    if: commit_message !~ /\[skip tests\]/
  - name: release
    if: fork = false AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request AND commit_message !~ /\[no release\]/
  - name: update_downstream
    if: fork = false AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request AND commit_message !~ /\[no downstream\]/
  - name: trigger_downstream
    if: fork = false AND (branch != master AND branch !~ /release\/.*/) AND type != pull_request AND commit_message !~ /\[no downstream\]/

before_install: travis_retry bash scripts/travis/init.sh
install: travis_retry travis_wait 40 bash scripts/travis/build.sh

jobs:
  include:

    - name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
      script:
        - travis_retry mvn -B test -pl core,data-model
        - travis_retry mvn -B test -pl repository -Dtest=AllUnitTestsSuite

    - name: "Repository - AppContext01TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - AppContext02TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - AppContext03TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - AppContext04TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - AppContext05TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - mkdir -p "${HOME}/tmp"
        - cp repository/src/test/resources/realms/alfresco-realm.json "${HOME}/tmp"
        - export HOST_IP=$(hostname -I | cut -f1 -d' ')
        - docker run -d -e KEYCLOAK_USER=admin -e KEYCLOAK_PASSWORD=admin -e DB_VENDOR=h2 -p 8999:8080 -e KEYCLOAK_IMPORT=/tmp/alfresco-realm.json -v $HOME/tmp/alfresco-realm.json:/tmp/alfresco-realm.json alfresco/alfresco-identity-service:1.2
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext05TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "-Didentity-service.auth-server-url=http://${HOST_IP}:8999/auth"

    - name: "Repository - AppContext06TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext06TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - AppContextExtraTestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - MiscContextTestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl repository -Dtest=MiscContextTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - SearchTestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=SearchTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco -Dindex.subsystem.name=solr6

    - name: "Repository - MariaDB 10.2.18 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.2.18 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver

    - name: "Repository - MariaDB 10.4 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.4 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver

    - name: "Repository - MariaDB 10.5 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.5 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver

    - name: "Repository - MySQL 5.7.23 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:5.7.23 --transaction-isolation='READ-COMMITTED'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - MySQL 8 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:8 --transaction-isolation='READ-COMMITTED'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - PostgreSQL 10.9 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:10.9 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - PostgreSQL 11.7 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - PostgreSQL 12.4 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:12.4 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Repository - PostgreSQL 13.1 tests"
      if: commit_message !~ /\[skip db\]/
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Remote-api - AppContext01TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Remote-api - AppContext02TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Remote-api - AppContext03TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Remote-api - AppContext04TestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
        - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.10
      script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "Remote-api - AppContextExtraTestSuite"
      before_script:
        - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
        - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
      script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

    - name: "REST API TAS tests part1"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part1 -Denvironment=default -DrunBugs=false

    - name: "REST API TAS tests part2"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part2 -Denvironment=default -DrunBugs=false

    - name: "REST API TAS tests part3"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part3 -Denvironment=default -DrunBugs=false

    - name: "CMIS TAS tests - BROWSER binding"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-browser -Denvironment=default -DrunBugs=false

    - name: "CMIS TAS tests - ATOM binding"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-atom -Denvironment=default -DrunBugs=false

    - name: "CMIS TAS tests - WEBSERVICES binding"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-webservices -Denvironment=default -DrunBugs=false

    - name: "Email TAS tests"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 30 mvn -B install -f packaging/tests/tas-email/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false

    - name: "WebDAV TAS tests"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 30 mvn -B install -f packaging/tests/tas-webdav/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false

    - name: "Integration TAS tests"
      if: branch = master OR commit_message =~ /\[tas\]/
      before_script:
        - ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
        - ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
      script:
        - travis_wait 30 mvn -B install -f packaging/tests/tas-integration/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false

    - name: "Push to Nexus"
      stage: release
      before_script: bash scripts/travis/verify_release_tag.sh
      script: travis_wait 40 bash scripts/travis/maven_release.sh

    - name: "Update alfresco-enterprise-repo"
      stage: update_downstream
      install: skip
      script: bash scripts/travis/update_downstream.sh

    - name: "Trigger alfresco-enterprise-repo build"
      stage: trigger_downstream
      before_install: skip
      install: skip
      script: bash scripts/travis/trigger_travis.sh "Alfresco" "alfresco-enterprise-repo" "${TRAVIS_BRANCH}"
@@ -1,6 +1,6 @@
 # alfresco-community-repo
 
-[](https://travis-ci.com/Alfresco/alfresco-community-repo)
+[](https://github.com/Alfresco/alfresco-community-repo/actions/workflows/master_release.yml)
 
 This project contains the bulk of the [Alfresco Content Services Repository](https://community.alfresco.com/docs/DOC-6385-project-overview-repository) code.
@@ -7,7 +7,7 @@
     <parent>
         <groupId>org.alfresco</groupId>
         <artifactId>alfresco-community-repo</artifactId>
-        <version>8.424-SNAPSHOT</version>
+        <version>9.41</version>
     </parent>
 
     <dependencies>
@@ -21,7 +21,6 @@ package org.alfresco.httpclient;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.security.AlgorithmParameters;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -32,14 +31,11 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import org.alfresco.encryption.AlfrescoKeyStore;
 import org.alfresco.encryption.AlfrescoKeyStoreImpl;
 import org.alfresco.encryption.EncryptionUtils;
 import org.alfresco.encryption.Encryptor;
 import org.alfresco.encryption.KeyProvider;
 import org.alfresco.encryption.KeyResourceLoader;
 import org.alfresco.encryption.KeyStoreParameters;
 import org.alfresco.encryption.ssl.AuthSSLProtocolSocketFactory;
 import org.alfresco.encryption.ssl.SSLEncryptionParameters;
 import org.alfresco.error.AlfrescoRuntimeException;
 import org.alfresco.util.Pair;
 import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
 import org.apache.commons.httpclient.HostConfiguration;
 import org.apache.commons.httpclient.HttpClient;
@@ -53,8 +49,6 @@ import org.apache.commons.httpclient.SimpleHttpConnectionManager;
 import org.apache.commons.httpclient.URI;
 import org.apache.commons.httpclient.URIException;
 import org.apache.commons.httpclient.cookie.CookiePolicy;
 import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
 import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.commons.httpclient.params.DefaultHttpParams;
 import org.apache.commons.httpclient.params.DefaultHttpParamsFactory;
 import org.apache.commons.httpclient.params.HttpClientParams;
@@ -75,23 +69,25 @@ import org.apache.commons.logging.LogFactory;
  */
 public class HttpClientFactory
 {
+    /**
+     * Communication type for HttpClient:
+     * - NONE is plain http
+     * - SECRET is plain http with a shared secret via request header
+     * - HTTPS is mTLS with client authentication (certificates are required)
+     */
     public static enum SecureCommsType
     {
-        HTTPS, NONE;
+        HTTPS, NONE, SECRET;
 
         public static SecureCommsType getType(String type)
         {
-            if(type.equalsIgnoreCase("https"))
+            switch (type.toLowerCase())
             {
-                return HTTPS;
-            }
-            else if(type.equalsIgnoreCase("none"))
-            {
-                return NONE;
-            }
-            else
-            {
-                throw new IllegalArgumentException("Invalid communications type");
+                case "https": return HTTPS;
+                case "none": return NONE;
+                case "secret": return SECRET;
+                default: throw new IllegalArgumentException("Invalid communications type");
             }
         }
     };
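The new getType mapping is easiest to see from the caller's side. A minimal sketch, assuming the configured value ultimately comes from the [solr|alfresco].secureComms property mentioned in the error messages further down; the variable names here are illustrative only:

    // Resolve the configured communications mode; "secret" is now a valid value.
    String configured = "secret";
    HttpClientFactory.SecureCommsType type = HttpClientFactory.SecureCommsType.getType(configured);
    // type == SecureCommsType.SECRET; any unrecognised value still throws IllegalArgumentException.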
@@ -122,14 +118,24 @@ public class HttpClientFactory
 
     private int connectionTimeout = 0;
 
+    // Shared secret parameters
+    private String sharedSecret;
+    private String sharedSecretHeader = DEFAULT_SHAREDSECRET_HEADER;
+
+    // Default name for HTTP Request Header when using shared secret communication
+    public static final String DEFAULT_SHAREDSECRET_HEADER = "X-Alfresco-Search-Secret";
+
     public HttpClientFactory()
     {
     }
 
 
+    /**
+     * Default constructor for legacy subsystems.
+     */
     public HttpClientFactory(SecureCommsType secureCommsType, SSLEncryptionParameters sslEncryptionParameters,
-            KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
-            MD5EncryptionParameters encryptionParameters, String host, int port, int sslPort, int maxTotalConnections,
-            int maxHostConnections, int socketTimeout)
+            KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
+            MD5EncryptionParameters encryptionParameters, String host, int port, int sslPort,
+            int maxTotalConnections, int maxHostConnections, int socketTimeout)
     {
         this.secureCommsType = secureCommsType;
         this.sslEncryptionParameters = sslEncryptionParameters;
@@ -145,6 +151,21 @@ public class HttpClientFactory
         init();
     }
 
+    /**
+     * Recommended constructor for subsystems supporting Shared Secret communication.
+     * This constructor supports Shared Secret ("secret") communication method additionally to the legacy ones: "none" and "https".
+     */
+    public HttpClientFactory(SecureCommsType secureCommsType, SSLEncryptionParameters sslEncryptionParameters,
+            KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
+            MD5EncryptionParameters encryptionParameters, String sharedSecret, String sharedSecretHeader,
+            String host, int port, int sslPort, int maxTotalConnections, int maxHostConnections, int socketTimeout)
+    {
+        this(secureCommsType, sslEncryptionParameters, keyResourceLoader, keyStoreParameters, encryptionParameters,
+                host, port, sslPort, maxTotalConnections, maxHostConnections, socketTimeout);
+        this.sharedSecret = sharedSecret;
+        this.sharedSecretHeader = sharedSecretHeader;
+    }
+
     public void init()
     {
         this.sslKeyStore = new AlfrescoKeyStoreImpl(sslEncryptionParameters.getKeyStoreParameters(), keyResourceLoader);
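To make the new constructor concrete, here is a hedged sketch of how a subsystem could build a shared-secret client with it. In the repository these collaborators are normally wired by Spring, so the helper method, host, ports and secret value below are illustrative assumptions rather than code from this change:

    // Hypothetical wiring helper; the SSL/keystore/MD5 collaborators stay explicit
    // because init() still dereferences the SSL encryption parameters.
    static AlfrescoHttpClient newSharedSecretRepoClient(SSLEncryptionParameters sslParams,
            KeyResourceLoader keyLoader, KeyStoreParameters keyStoreParams,
            MD5EncryptionParameters md5Params)
    {
        HttpClientFactory factory = new HttpClientFactory(
                HttpClientFactory.SecureCommsType.SECRET,
                sslParams, keyLoader, keyStoreParams, md5Params,
                "my-shared-secret",                            // sharedSecret
                HttpClientFactory.DEFAULT_SHAREDSECRET_HEADER, // sharedSecretHeader
                "localhost", 8983, 8984,                       // host, port, sslPort
                40, 40, 0);                                    // maxTotalConnections, maxHostConnections, socketTimeout
        return factory.getRepoClient("localhost", 8983);
    }

The delegating constructor keeps the legacy "none"/"https" callers on the old signature, while "secret" subsystems pass only the two extra arguments.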
@@ -272,10 +293,44 @@ public class HttpClientFactory
         this.connectionTimeout = connectionTimeout;
     }
 
-    protected HttpClient constructHttpClient()
+    /**
+     * Shared secret used for SECRET communication
+     * @param secret shared secret word
+     */
+    public void setSharedSecret(String sharedSecret)
+    {
+        this.sharedSecret = sharedSecret;
+    }
+
+    /**
+     * @return Shared secret used for SECRET communication
+     */
+    public String getSharedSecret()
+    {
+        return sharedSecret;
+    }
+
+    /**
+     * HTTP Request header used for SECRET communication
+     * @param sharedSecretHeader HTTP Request header
+     */
+    public void setSharedSecretHeader(String sharedSecretHeader)
+    {
+        this.sharedSecretHeader = sharedSecretHeader;
+    }
+
+    /**
+     * @return HTTP Request header used for SECRET communication
+     */
+    public String getSharedSecretHeader()
+    {
+        return sharedSecretHeader;
+    }
+
+    protected RequestHeadersHttpClient constructHttpClient()
     {
         MultiThreadedHttpConnectionManager connectionManager = new MultiThreadedHttpConnectionManager();
-        HttpClient httpClient = new HttpClient(connectionManager);
+        RequestHeadersHttpClient httpClient = new RequestHeadersHttpClient(connectionManager);
         HttpClientParams params = httpClient.getParams();
         params.setBooleanParameter(HttpConnectionParams.TCP_NODELAY, true);
         params.setBooleanParameter(HttpConnectionParams.STALE_CONNECTION_CHECK, true);
@@ -291,15 +346,15 @@ public class HttpClientFactory
         return httpClient;
     }
 
-    protected HttpClient getHttpsClient()
+    protected RequestHeadersHttpClient getHttpsClient()
     {
         return getHttpsClient(host, sslPort);
     }
 
-    protected HttpClient getHttpsClient(String httpsHost, int httpsPort)
+    protected RequestHeadersHttpClient getHttpsClient(String httpsHost, int httpsPort)
     {
         // Configure a custom SSL socket factory that will enforce mutual authentication
-        HttpClient httpClient = constructHttpClient();
+        RequestHeadersHttpClient httpClient = constructHttpClient();
         // Default port is 443 for the HostFactory, when including customised port (like 8983) the port name is skipped from "getHostURL" string
         HttpHostFactory hostFactory = new HttpHostFactory(new Protocol("https", sslSocketFactory, HttpsURL.DEFAULT_PORT));
         httpClient.setHostConfiguration(new HostConfigurationWithHostFactory(hostFactory));
@@ -307,28 +362,54 @@ public class HttpClientFactory
         return httpClient;
     }
 
-    protected HttpClient getDefaultHttpClient()
+    protected RequestHeadersHttpClient getDefaultHttpClient()
     {
         return getDefaultHttpClient(host, port);
     }
 
-    protected HttpClient getDefaultHttpClient(String httpHost, int httpPort)
+    protected RequestHeadersHttpClient getDefaultHttpClient(String httpHost, int httpPort)
     {
-        HttpClient httpClient = constructHttpClient();
+        RequestHeadersHttpClient httpClient = constructHttpClient();
         httpClient.getHostConfiguration().setHost(httpHost, httpPort);
         return httpClient;
     }
 
+    /**
+     * Build HTTP Client using default headers
+     * @return RequestHeadersHttpClient including default header for shared secret method
+     */
+    protected RequestHeadersHttpClient constructSharedSecretHttpClient()
+    {
+        RequestHeadersHttpClient client = constructHttpClient();
+        client.setDefaultHeaders(Map.of(sharedSecretHeader, sharedSecret));
+        return client;
+    }
+
+    protected RequestHeadersHttpClient getSharedSecretHttpClient()
+    {
+        return getSharedSecretHttpClient(host, port);
+    }
+
+    protected RequestHeadersHttpClient getSharedSecretHttpClient(String httpHost, int httpPort)
+    {
+        RequestHeadersHttpClient httpClient = constructSharedSecretHttpClient();
+        httpClient.getHostConfiguration().setHost(httpHost, httpPort);
+        return httpClient;
+    }
+
     protected AlfrescoHttpClient getAlfrescoHttpsClient()
     {
-        AlfrescoHttpClient repoClient = new HttpsClient(getHttpsClient());
-        return repoClient;
+        return new HttpsClient(getHttpsClient());
    }
 
     protected AlfrescoHttpClient getAlfrescoHttpClient()
     {
-        AlfrescoHttpClient repoClient = new DefaultHttpClient(getDefaultHttpClient());
-        return repoClient;
+        return new DefaultHttpClient(getDefaultHttpClient());
     }
 
+    protected AlfrescoHttpClient getAlfrescoSharedSecretClient()
+    {
+        return new DefaultHttpClient(getSharedSecretHttpClient());
+    }
+
     protected HttpClient getMD5HttpClient(String host, int port)
@@ -341,66 +422,37 @@ public class HttpClientFactory
|
||||
|
||||
public AlfrescoHttpClient getRepoClient(String host, int port)
|
||||
{
|
||||
AlfrescoHttpClient repoClient = null;
|
||||
|
||||
if(secureCommsType == SecureCommsType.HTTPS)
|
||||
switch (secureCommsType)
|
||||
{
|
||||
repoClient = getAlfrescoHttpsClient();
|
||||
case HTTPS: return getAlfrescoHttpsClient();
|
||||
case NONE: return getAlfrescoHttpClient();
|
||||
case SECRET: return getAlfrescoSharedSecretClient();
|
||||
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
|
||||
}
|
||||
else if(secureCommsType == SecureCommsType.NONE)
|
||||
}
|
||||
|
||||
public RequestHeadersHttpClient getHttpClient()
|
||||
{
|
||||
switch (secureCommsType)
|
||||
{
|
||||
repoClient = getAlfrescoHttpClient();
|
||||
case HTTPS: return getHttpsClient();
|
||||
case NONE: return getDefaultHttpClient();
|
||||
case SECRET: return getSharedSecretHttpClient();
|
||||
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
|
||||
}
|
||||
|
||||
return repoClient;
|
||||
}
|
||||
|
||||
public HttpClient getHttpClient()
|
||||
public RequestHeadersHttpClient getHttpClient(String host, int port)
|
||||
{
|
||||
HttpClient httpClient = null;
|
||||
|
||||
if(secureCommsType == SecureCommsType.HTTPS)
|
||||
switch (secureCommsType)
|
||||
{
|
||||
httpClient = getHttpsClient();
|
||||
case HTTPS: return getHttpsClient(host, port);
|
||||
case NONE: return getDefaultHttpClient(host, port);
|
||||
case SECRET: return getSharedSecretHttpClient(host, port);
|
||||
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
|
||||
}
|
||||
else if(secureCommsType == SecureCommsType.NONE)
|
||||
{
|
||||
httpClient = getDefaultHttpClient();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
|
||||
}
|
||||
|
||||
return httpClient;
|
||||
}
|
||||
|
||||
public HttpClient getHttpClient(String host, int port)
|
||||
{
|
||||
HttpClient httpClient = null;
|
||||
|
||||
if(secureCommsType == SecureCommsType.HTTPS)
|
||||
{
|
||||
httpClient = getHttpsClient(host, port);
|
||||
}
|
||||
else if(secureCommsType == SecureCommsType.NONE)
|
||||
{
|
||||
httpClient = getDefaultHttpClient(host, port);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
|
||||
}
|
||||
|
||||
return httpClient;
|
||||
}
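Taken together, the factory now resolves three secureComms modes through the switch above. A minimal sketch of how a caller might obtain the new shared-secret client; this is illustrative only, the setter names for host, port and comms type, the no-arg constructor, and the values shown are assumptions and not part of this diff:

    // Hypothetical wiring - only setSharedSecret/setSharedSecretHeader and getRepoClient are shown in this change
    HttpClientFactory factory = new HttpClientFactory();
    factory.setSharedSecret("change-me");                       // must match the secret configured on the Solr side
    factory.setSharedSecretHeader("X-Alfresco-Search-Secret");  // the default header name
    // with secureCommsType == SecureCommsType.SECRET, getRepoClient() returns a DefaultHttpClient
    // wrapping a RequestHeadersHttpClient that adds the header to every request
    AlfrescoHttpClient repoClient = factory.getRepoClient("localhost", 8983);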


/**
 * A secure client connection to the repository.
 *
@@ -0,0 +1,87 @@
/*
 * Copyright (C) 2005-2021 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.httpclient;

import java.io.IOException;
import java.util.Map;

import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;

/**
 * Since Apache HttpClient 3.1 doesn't support including custom headers by default,
 * this class adds those custom headers every time a method is invoked.
 */
public class RequestHeadersHttpClient extends HttpClient
{

    private Map<String, String> defaultHeaders;

    public RequestHeadersHttpClient(MultiThreadedHttpConnectionManager connectionManager)
    {
        super(connectionManager);
    }

    public Map<String, String> getDefaultHeaders()
    {
        return defaultHeaders;
    }

    public void setDefaultHeaders(Map<String, String> defaultHeaders)
    {
        this.defaultHeaders = defaultHeaders;
    }

    private void addDefaultHeaders(HttpMethod method)
    {
        if (defaultHeaders != null)
        {
            defaultHeaders.forEach((k,v) -> {
                method.addRequestHeader(k, v);
            });
        }
    }

    @Override
    public int executeMethod(HttpMethod method) throws IOException, HttpException
    {
        addDefaultHeaders(method);
        return super.executeMethod(method);
    }

    @Override
    public int executeMethod(HostConfiguration hostConfiguration, HttpMethod method) throws IOException, HttpException
    {
        addDefaultHeaders(method);
        return super.executeMethod(hostConfiguration, method);
    }

    @Override
    public int executeMethod(HostConfiguration hostconfig, HttpMethod method, HttpState state)
        throws IOException, HttpException
    {
        addDefaultHeaders(method);
        return super.executeMethod(hostconfig, method, state);
    }

}
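A brief sketch of how the new client behaves, for orientation only; the Solr URL and the secret value below are placeholders, not values defined by this change:

    // Illustrative use of RequestHeadersHttpClient - not code from this PR
    MultiThreadedHttpConnectionManager cm = new MultiThreadedHttpConnectionManager();
    RequestHeadersHttpClient client = new RequestHeadersHttpClient(cm);
    client.setDefaultHeaders(Map.of("X-Alfresco-Search-Secret", "change-me"));

    GetMethod get = new GetMethod("http://localhost:8983/solr/admin/cores");
    client.executeMethod(get); // every executeMethod() overload adds the default headers before delegating to HttpClient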
@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <properties>
@@ -167,7 +167,7 @@
  <dependency>
    <groupId>com.fasterxml.woodstox</groupId>
    <artifactId>woodstox-core</artifactId>
    <version>6.2.4</version>
    <version>6.2.6</version>
  </dependency>

  <!-- the cxf libs were updated, see dependencyManagement section -->
@@ -283,6 +283,31 @@
    <groupId>com.sun.activation</groupId>
    <artifactId>javax.activation</artifactId>
  </exclusion>
  <!-- No longer needed -->
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>pdfbox</artifactId>
  </exclusion>
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>pdfbox-tools</artifactId>
  </exclusion>
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>preflight</artifactId>
  </exclusion>
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>jempbox</artifactId>
  </exclusion>
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>xmpbox</artifactId>
  </exclusion>
  <exclusion>
    <groupId>org.apache.pdfbox</groupId>
    <artifactId>jbig2-imageio</artifactId>
  </exclusion>
</exclusions>
</dependency>
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Data model classes
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of

@@ -60,12 +60,15 @@ public abstract class ConfigScheduler<Data>
    // Synchronized has little effect in normal operation, but on laptops that are suspended, there can be a number
    // of Threads calling execute concurrently without it, resulting in errors in the log. Theoretically possible in
    // production but not very likely.
    public synchronized void execute(JobExecutionContext context) throws JobExecutionException
    public void execute(JobExecutionContext context) throws JobExecutionException
    {
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        ConfigScheduler configScheduler = (ConfigScheduler)dataMap.get(CONFIG_SCHEDULER);
        boolean successReadingConfig = configScheduler.readConfigAndReplace(true);
        configScheduler.changeScheduleOnStateChange(successReadingConfig);
        synchronized (configScheduler)
        {
            boolean successReadingConfig = configScheduler.readConfigAndReplace(true);
            configScheduler.changeScheduleOnStateChange(successReadingConfig);
        }
    }
}

@@ -9,6 +9,6 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-packaging</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>
</project>
@@ -1,6 +1,6 @@
# Fetch image based on Tomcat 9.0, Java 11 and Centos 8
# More infos about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
FROM alfresco/alfresco-base-tomcat:9.0.41-java-11-openjdk-centos-8
FROM alfresco/alfresco-base-tomcat:9.0.52-java-11-centos-7

# Set default docker_context.
ARG resource_path=target
@@ -65,12 +65,12 @@ RUN sed -i -e "s_log4j.appender.File.File\=alfresco.log_log4j.appender.File.File

# fontconfig is required by Activiti worflow diagram generator
# installing pinned dependencies as well
RUN yum install -y fontconfig-2.13.1-3.el8 \
    dejavu-fonts-common-2.35-6.el8 \
    fontpackages-filesystem-1.44-22.el8 \
    freetype-2.9.1-4.el8_3.1 \
    libpng-1.6.34-5.el8 \
    dejavu-sans-fonts-2.35-6.el8 && \
RUN yum install -y fontconfig-2.13.0-4.3.el7 \
    dejavu-fonts-common-2.33-6.el7 \
    fontpackages-filesystem-1.44-8.el7 \
    freetype-2.8-14.el7_9.1 \
    libpng-1.5.13-8.el7 \
    dejavu-sans-fonts-2.33-6.el7 && \
    yum clean all

# The standard configuration is to have all Tomcat files owned by root with group GROUPNAME and whilst owner has read/write privileges,

@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-packaging</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <properties>

@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <profiles>

@@ -1,4 +1,4 @@
TRANSFORMERS_TAG=2.3.10
SOLR6_TAG=2.0.1
TRANSFORMERS_TAG=2.4.0
SOLR6_TAG=2.0.2-RC2
POSTGRES_TAG=13.1
ACTIVEMQ_TAG=5.16.1

@@ -6,7 +6,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-packaging</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <modules>
packaging/tests/scripts/output_logs_for_failures.sh (new executable file)
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

TAS_DIRECTORY=$1

cd ${TAS_DIRECTORY}

failures=$(grep 'status="FAIL"' target/surefire-reports/testng-results.xml | sed 's|^.*[ ]name="\([^"]*\)".*$|\1|g')

for failure in ${failures}
do
    cat target/reports/alfresco-tas.log | sed '/STARTING Test: \['${failure}'\]/,/ENDING Test: \['${failure}'\]/!d;/ENDING Test: \['${failure}'\]/q'
done

packaging/tests/scripts/output_tests_run.sh (new executable file)
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

TAS_DIRECTORY=$1

cd ${TAS_DIRECTORY}

cat target/reports/alfresco-tas.log | grep -a "*** STARTING"

@@ -1,7 +1,18 @@
#!/usr/bin/env bash

export DOCKER_COMPOSE_PATH=$1
export CLEAN_UP="$2"
export DOCKER_COMPOSES=""
export CLEAN_UP=""

for var in "$@"
do
    if [ "$var" == "no-clean-up" ]
    then
        export CLEAN_UP="$var"
    else
        export DOCKER_COMPOSES+="--file $var "
    fi
done

if [ -z "$DOCKER_COMPOSE_PATH" ]
then
@@ -15,8 +26,8 @@ fi
# The second parameter can be used to avoid doing a clean up if we are doing a restart test.
if [ "$CLEAN_UP" != "no-clean-up" ]
then
    docker-compose --file "${DOCKER_COMPOSE_PATH}" kill
    docker-compose --file "${DOCKER_COMPOSE_PATH}" rm -f
    docker-compose ${DOCKER_COMPOSES} --project-directory $(dirname "${DOCKER_COMPOSE_PATH}") kill
    docker-compose ${DOCKER_COMPOSES} --project-directory $(dirname "${DOCKER_COMPOSE_PATH}") rm -f

export GENERATED_IMAGES=$(docker images | grep '^environment_' | awk '{ print $3 }')
if [ -n "$GENERATED_IMAGES" ]
@@ -28,7 +39,7 @@ fi
echo "Starting ACS stack in ${DOCKER_COMPOSE_PATH}"

# .env files are picked up from project directory correctly on docker-compose 1.23.0+
docker-compose --file "${DOCKER_COMPOSE_PATH}" --project-directory $(dirname "${DOCKER_COMPOSE_PATH}") up -d
docker-compose ${DOCKER_COMPOSES} --project-directory $(dirname "${DOCKER_COMPOSE_PATH}") up -d

if [ $? -eq 0 ]
then
@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-tests</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <developers>

@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-tests</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <developers>

@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-tests</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <developers>

@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-tests</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <developers>
@@ -347,13 +347,30 @@ public class GetSitesTests extends RestTest
    public void checkPaginationWithSkipCountAndMaxItems() throws Exception
    {
        sites = restClient.authenticateUser(regularUser).withParams("skipCount=10&maxItems=110").withCoreAPI().getSites();

        int expectedCount;
        if (sites.getPagination().isHasMoreItems())
        {
            expectedCount = sites.getPagination().getMaxItems();
        }
        else
        {
            if (sites.getPagination().getTotalItems() < sites.getPagination().getSkipCount())
            {
                expectedCount = 0;
            }
            else
            {
                expectedCount = sites.getPagination().getTotalItems() - sites.getPagination().getSkipCount();
            }
        }

        sites.getPagination().assertThat()
            .field("totalItems").isNotEmpty().and()
            .field("maxItems").is("110").and()
            .field("hasMoreItems").is((sites.getPagination().getTotalItems() - sites.getPagination().getSkipCount() > sites.getPagination().getMaxItems())?"true":"false").and()
            .field("skipCount").is("10").and()
            .field("count").is((sites.getPagination().isHasMoreItems()) ? sites.getPagination().getMaxItems()
                : sites.getPagination().getTotalItems() - sites.getPagination().getSkipCount());
            .field("count").is(expectedCount);
    }
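To make the expected-count rule concrete, a small worked example; the numbers are illustrative only, not taken from a real environment:

    // skipCount = 10, maxItems = 110 in all three cases (the values used by the test request above)
    // totalItems = 150 -> hasMoreItems = true,  count = maxItems           = 110
    // totalItems = 60  -> hasMoreItems = false, count = totalItems - skip  = 50
    // totalItems = 5   -> hasMoreItems = false, count = 0 (skipCount exceeds totalItems)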

    @Test(groups = { TestGroup.REST_API, TestGroup.SITES, TestGroup.REGRESSION})

@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-tests</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <developers>
@@ -1,17 +1,25 @@
package org.alfresco.webdav;

import java.lang.reflect.Method;

import org.alfresco.utility.LogFactory;
import org.alfresco.utility.data.DataContent;
import org.alfresco.utility.data.DataSite;
import org.alfresco.utility.data.DataUser;
import org.alfresco.utility.network.ServerHealth;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;

@ContextConfiguration("classpath:alfresco-webdav-context.xml")
public abstract class WebDavTest extends AbstractTestNGSpringContextTests
{
    private static final Logger LOG = LogFactory.getLogger();

    @Autowired
    protected DataSite dataSite;

@@ -36,4 +44,16 @@ public abstract class WebDavTest extends AbstractTestNGSpringContextTests
        // The webdav protocol is enabled by default.
        //webDavProtocol.assertThat().protocolIsEnabled();
    }

    @BeforeMethod(alwaysRun=true)
    public void showStartTestInfo(Method method)
    {
        LOG.info(String.format("*** STARTING Test: [%s] ***", method.getName()));
    }

    @AfterMethod(alwaysRun=true)
    public void showEndTestInfo(Method method)
    {
        LOG.info(String.format("*** ENDING Test: [%s] ***", method.getName()));
    }
}
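These `*** STARTING Test: [...] ***` / `*** ENDING Test: [...] ***` markers are what the new packaging/tests/scripts/output_tests_run.sh and output_logs_for_failures.sh scripts grep for in target/reports/alfresco-tas.log, so CI can list the tests that ran and dump only the log section belonging to each failed TAS test.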

@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo-packaging</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <properties>

@@ -57,8 +57,8 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
<html xmlns="http://www.w3.org/1999/xhtml">
  <head>
    <title>Alfresco</title>
    <link rel="stylesheet" type="text/css" href="./css/reset.css" />
    <link rel="stylesheet" type="text/css" href="./css/alfresco.css" />
    <link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/reset.css" />
    <link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/alfresco.css" />
  </head>
  <body>
    <div class="sticky-wrapper">

@@ -4,21 +4,21 @@
%%
Copyright (C) 2005 - 2016 Alfresco Software Limited
%%
This file is part of the Alfresco software.
If the software was purchased under a paid Alfresco license, the terms of
the paid license agreement will prevail. Otherwise, the software is
This file is part of the Alfresco software.
If the software was purchased under a paid Alfresco license, the terms of
the paid license agreement will prevail. Otherwise, the software is
provided under the following open source license terms:

Alfresco is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Alfresco is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
#L%
@@ -57,19 +57,19 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
<html xmlns="http://www.w3.org/1999/xhtml">
  <head>
    <title>Alfresco</title>
    <link rel="stylesheet" type="text/css" href="./css/reset.css" />
    <link rel="stylesheet" type="text/css" href="./css/alfresco.css" />
    <link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/reset.css" />
    <link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/alfresco.css" />
  </head>
  <body>
    <div class="sticky-wrapper">
      <div class="index">

        <div class="title">
          <span class="logo"><a href="http://www.alfresco.com"><img src="./images/logo/logo.png" width="145" height="48" alt="" border="0" /></a></span>
          <span class="logo-separator"> </span>
          <h1>Welcome to Alfresco</h1>
        </div>

        <div class="index-list">
          <h4><%=descriptorService.getServerDescriptor().getEdition()%> - <%=descriptorService.getServerDescriptor().getVersion()%></h4>
          <p></p>
@@ -94,7 +94,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
{
%>
<p>WARNING: The system is in Read Only mode, the License may have failed to deploy. Please visit the <a href="./s/enterprise/admin">Alfresco Administration Console</a> (admin only)</p>
<%
<%
}
if (descriptorService.getLicenseDescriptor() != null && descriptorService.getLicenseDescriptor().getLicenseMode().toString().equals("ENTERPRISE"))
{
@@ -120,7 +120,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
<p><a href="./api/-default-/public/cmis/versions/1.1/atom">CMIS 1.1 AtomPub Service Document</a></p>
<p><a href="./api/-default-/public/cmis/versions/1.1/browser">CMIS 1.1 Browser Binding URL</a></p>
</div>

</div>
<div class="push"></div>
</div>
pom.xml
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <artifactId>alfresco-community-repo</artifactId>
  <version>8.424-SNAPSHOT</version>
  <version>9.41</version>
  <packaging>pom</packaging>
  <name>Alfresco Community Repo Parent</name>

@@ -23,7 +23,7 @@
  <properties>
    <acs.version.major>7</acs.version.major>
    <acs.version.minor>0</acs.version.minor>
    <acs.version.revision>0</acs.version.revision>
    <acs.version.revision>1</acs.version.revision>
    <acs.version.label />

    <version.edition>Community</version.edition>
@@ -34,15 +34,15 @@
    <image.registry>quay.io</image.registry>

    <java.version>11</java.version>
    <maven.compiler.source>11</maven.compiler.source>
    <maven.compiler.target>11</maven.compiler.target>
    <maven.build.sourceVersion>11</maven.build.sourceVersion>
    <maven.compiler.source>${java.version}</maven.compiler.source>
    <maven.compiler.target>${java.version}</maven.compiler.target>
    <maven.build.sourceVersion>${java.version}</maven.build.sourceVersion>

    <dir.root>${project.build.directory}/alf_data</dir.root>

    <dependency.alfresco-hb-data-sender.version>1.0.12</dependency.alfresco-hb-data-sender.version>
    <dependency.alfresco-mmt.version>6.0</dependency.alfresco-mmt.version>
    <dependency.alfresco-trashcan-cleaner.version>2.3</dependency.alfresco-trashcan-cleaner.version>
    <dependency.alfresco-trashcan-cleaner.version>2.4.1</dependency.alfresco-trashcan-cleaner.version>
    <dependency.alfresco-jlan.version>7.1</dependency.alfresco-jlan.version>
    <dependency.alfresco-server-root.version>6.0.1</dependency.alfresco-server-root.version>
    <dependency.alfresco-messaging-repo.version>1.2.15</dependency.alfresco-messaging-repo.version>
@@ -55,14 +55,14 @@

    <dependency.spring.version>5.3.3</dependency.spring.version>
    <dependency.antlr.version>3.5.2</dependency.antlr.version>
    <dependency.jackson.version>2.12.1</dependency.jackson.version>
    <dependency.jackson-databind.version>${dependency.jackson.version}</dependency.jackson-databind.version>
    <dependency.cxf.version>3.4.2</dependency.cxf.version>
    <dependency.jackson.version>2.12.3</dependency.jackson.version>
    <dependency.jackson-databind.version>2.12.3</dependency.jackson-databind.version>
    <dependency.cxf.version>3.4.4</dependency.cxf.version>
    <dependency.opencmis.version>1.0.0</dependency.opencmis.version>
    <dependency.webscripts.version>8.18</dependency.webscripts.version>
    <dependency.bouncycastle.version>1.68</dependency.bouncycastle.version>
    <dependency.mockito-core.version>3.8.0</dependency.mockito-core.version>
    <dependency.org-json.version>20201115</dependency.org-json.version>
    <dependency.webscripts.version>8.27</dependency.webscripts.version>
    <dependency.bouncycastle.version>1.69</dependency.bouncycastle.version>
    <dependency.mockito-core.version>3.9.0</dependency.mockito-core.version>
    <dependency.org-json.version>20210307</dependency.org-json.version>
    <dependency.commons-dbcp.version>1.4-DBCP330</dependency.commons-dbcp.version>
    <dependency.commons-io.version>2.8.0</dependency.commons-io.version>
    <dependency.gson.version>2.8.5</dependency.gson.version>
@@ -73,17 +73,18 @@
    <dependency.slf4j.version>1.7.30</dependency.slf4j.version>
    <dependency.gytheio.version>0.12</dependency.gytheio.version>
    <dependency.groovy.version>2.5.9</dependency.groovy.version>
    <dependency.tika.version>1.25</dependency.tika.version>
    <dependency.spring-security.version>5.4.1</dependency.spring-security.version>
    <dependency.tika.version>1.26</dependency.tika.version>
    <dependency.spring-security.version>5.5.0</dependency.spring-security.version>
    <dependency.truezip.version>7.7.10</dependency.truezip.version>
    <dependency.poi.version>4.1.2</dependency.poi.version>
    <dependency.ooxml-schemas.version>1.4</dependency.ooxml-schemas.version>
    <dependency.keycloak.version>11.0.0-alfresco-001</dependency.keycloak.version>
    <dependency.keycloak.version>13.0.1</dependency.keycloak.version>
    <dependency.jboss.logging.version>3.4.1.Final</dependency.jboss.logging.version>
    <dependency.camel.version>3.7.0</dependency.camel.version>
    <dependency.camel.version>3.7.4</dependency.camel.version>
    <dependency.activemq.version>5.16.1</dependency.activemq.version>
    <dependency.apache-compress.version>1.20</dependency.apache-compress.version>
    <dependency.apache.taglibs.version>1.2.5</dependency.apache.taglibs.version>
    <dependency.awaitility.version>4.0.3</dependency.awaitility.version>
    <dependency.awaitility.version>4.1.0</dependency.awaitility.version>

    <dependency.jakarta-jaxb-api.version>2.3.3</dependency.jakarta-jaxb-api.version>
    <dependency.jakarta-ws-api.version>2.3.3</dependency.jakarta-ws-api.version>
@@ -96,16 +97,16 @@
    <dependency.jakarta-json-api.version>1.1.6</dependency.jakarta-json-api.version>
    <dependency.jakarta-rpc-api.version>1.1.4</dependency.jakarta-rpc-api.version>

    <alfresco.googledrive.version>3.2.1</alfresco.googledrive.version>
    <alfresco.aos-module.version>1.4.0</alfresco.aos-module.version>
    <alfresco.googledrive.version>3.2.1.3</alfresco.googledrive.version>
    <alfresco.aos-module.version>1.4.0.1</alfresco.aos-module.version>

    <dependency.postgresql.version>42.2.19</dependency.postgresql.version>
    <dependency.mysql.version>8.0.23</dependency.mysql.version>
    <dependency.postgresql.version>42.2.20</dependency.postgresql.version>
    <dependency.mysql.version>8.0.25</dependency.mysql.version>
    <dependency.mariadb.version>2.7.2</dependency.mariadb.version>
    <dependency.tas-utility.version>3.0.43</dependency.tas-utility.version>
    <dependency.tas-utility.version>3.0.44</dependency.tas-utility.version>
    <dependency.rest-assured.version>3.3.0</dependency.rest-assured.version>
    <dependency.tas-restapi.version>1.56</dependency.tas-restapi.version>
    <dependency.tas-cmis.version>1.27</dependency.tas-cmis.version>
    <dependency.tas-restapi.version>1.58</dependency.tas-restapi.version>
    <dependency.tas-cmis.version>1.30</dependency.tas-cmis.version>
    <dependency.tas-email.version>1.8</dependency.tas-email.version>
    <dependency.tas-webdav.version>1.6</dependency.tas-webdav.version>
    <dependency.tas-ftp.version>1.5</dependency.tas-ftp.version>
@@ -116,7 +117,7 @@
    <connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
    <developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
    <url>https://github.com/Alfresco/alfresco-community-repo</url>
    <tag>HEAD</tag>
    <tag>9.41</tag>
  </scm>

  <distributionManagement>
@@ -549,8 +550,7 @@
      <dependency>
        <groupId>org.bouncycastle</groupId>
        <artifactId>bcprov-jdk15on</artifactId>
        <version>1.68</version>
        <!-- <version>${dependency.bouncycastle.version}</version>-->
        <version>${dependency.bouncycastle.version}</version>
      </dependency>
      <dependency>
        <groupId>org.bouncycastle</groupId>
@@ -583,7 +583,7 @@
      <dependency>
        <groupId>com.drewnoakes</groupId>
        <artifactId>metadata-extractor</artifactId>
        <version>2.15.0</version>
        <version>2.16.0</version>
      </dependency>
      <!-- upgrade dependency from TIKA -->
      <dependency>
@@ -601,7 +601,7 @@
      <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-compress</artifactId>
        <version>1.20</version>
        <version>${dependency.apache-compress.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.commons</groupId>
@@ -679,7 +679,7 @@
      <dependency>
        <groupId>joda-time</groupId>
        <artifactId>joda-time</artifactId>
        <version>2.10.9</version>
        <version>2.10.10</version>
      </dependency>

      <!-- provided dependencies -->
@@ -694,7 +694,7 @@
      <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.13</version>
        <version>4.13.2</version>
        <scope>test</scope>
      </dependency>
      <dependency>
@@ -814,7 +814,7 @@
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
        <version>3.2.0</version>
        <version>3.3.0</version>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
@@ -829,5 +829,4 @@
    </plugins>
  </pluginManagement>
</build>

</project>
@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <dependencies>

@@ -25,21 +25,18 @@
 */
package org.alfresco.repo.web.scripts.solr;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.FilterChain;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.httpclient.HttpClientFactory;
import org.alfresco.repo.web.filter.beans.DependencyInjectedFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -88,9 +85,7 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi

    private String sharedSecret;

    private String sharedSecretHeader = DEFAULT_SHAREDSECRET_HEADER;

    private static final String DEFAULT_SHAREDSECRET_HEADER = "X-Alfresco-Search-Secret";
    private String sharedSecretHeader = HttpClientFactory.DEFAULT_SHAREDSECRET_HEADER;

    public void setSecureComms(String type)
    {
@@ -716,7 +716,7 @@ public abstract class BaseSSOAuthenticationFilter extends BaseAuthenticationFilt
    }
    else
    {
        if(!pathInfo.substring(0, 6).toLowerCase().equals("/cmis/") && !pathInfo.equals("/discovery"))
        if((pathInfo.length() > 5 && !pathInfo.substring(0, 6).toLowerCase().equals("/cmis/")) && !pathInfo.equals("/discovery"))
        {
            // remove tenant
            int idx = pathInfo.indexOf('/', 1);

@@ -41,7 +41,11 @@ public class DefaultExceptionResolver implements ExceptionResolver<Exception>
    @Override
    public ErrorResponse resolveException(Exception ex)
    {
        return new ErrorResponse(DEFAULT_MESSAGE_ID, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getLocalizedMessage(), ex.getStackTrace(), null);
        return new ErrorResponse(DEFAULT_MESSAGE_ID,
                HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                ex.getLocalizedMessage(),
                ex.getStackTrace(),
                null);
    }

}
@@ -0,0 +1,48 @@
/*
 * #%L
 * Alfresco Remote API
 * %%
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.rest.framework.core.exceptions;

import org.alfresco.repo.search.QueryParserException;

/**
 * QueryParserException is related with search requests to Search Services.
 */
public class QueryParserExceptionResolver implements ExceptionResolver<QueryParserException>
{

    @Override
    public ErrorResponse resolveException(QueryParserException ex)
    {
        return new ErrorResponse(
                DefaultExceptionResolver.DEFAULT_MESSAGE_ID,
                // Mapping the original HTTP Status code returned by Search Services
                ex.getHttpStatusCode(),
                ex.getLocalizedMessage(),
                ex.getStackTrace(),
                null);
    }

}
@@ -26,10 +26,12 @@
package org.alfresco.rest.framework.tools;

import org.alfresco.metrics.rest.RestMetricsReporter;
import org.alfresco.repo.search.QueryParserException;
import org.alfresco.rest.framework.Api;
import org.alfresco.rest.framework.core.exceptions.DefaultExceptionResolver;
import org.alfresco.rest.framework.core.exceptions.ErrorResponse;
import org.alfresco.rest.framework.core.exceptions.ExceptionResolver;
import org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver;
import org.alfresco.rest.framework.jacksonextensions.JacksonHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -48,6 +50,7 @@ public class ApiAssistant {

    private ExceptionResolver<Exception> defaultResolver = new DefaultExceptionResolver();
    private ExceptionResolver<WebScriptException> webScriptExceptionResolver;
    private ExceptionResolver<QueryParserException> queryParserExceptionResolver;
    private ExceptionResolver<Exception> resolver;
    private JacksonHelper jsonHelper;
    private RestMetricsReporter restMetricsReporter;
@@ -77,6 +80,10 @@ public class ApiAssistant {
    {
        error = webScriptExceptionResolver.resolveException((WebScriptException) ex);
    }
    else if (ex instanceof QueryParserException)
    {
        error = queryParserExceptionResolver.resolveException((QueryParserException) ex);
    }
    else
    {
        error = resolver.resolveException(ex);
@@ -100,6 +107,11 @@ public class ApiAssistant {
        this.webScriptExceptionResolver = webScriptExceptionResolver;
    }

    public void setQueryParserExceptionResolver(ExceptionResolver<QueryParserException> queryParserExceptionResolver)
    {
        this.queryParserExceptionResolver = queryParserExceptionResolver;
    }

    public void setResolver(ExceptionResolver<Exception> resolver) {
        this.resolver = resolver;
    }
@@ -31,6 +31,7 @@ import java.util.Properties;

import javax.servlet.ServletContext;

import org.alfresco.httpclient.HttpClientFactory.SecureCommsType;
import org.alfresco.web.scripts.servlet.X509ServletFilterBase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -70,7 +71,9 @@ public class AlfrescoX509ServletFilter extends X509ServletFilterBase
 * Return true or false based on the property. This will switch on/off X509 enforcement in the X509ServletFilterBase.
 */

if (prop == null || "none".equals(prop))
if (prop == null ||
    SecureCommsType.getType(prop) == SecureCommsType.NONE ||
    SecureCommsType.getType(prop) == SecureCommsType.SECRET)
{
    return false;
}
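A hedged reading of the effect, summarised as a short table in comments; the property name is taken from the error messages earlier in this diff and the exact values are assumptions rather than documented defaults:

    // Illustrative mapping only
    // [solr|alfresco].secureComms=https  -> X509 enforcement enabled (mutual TLS between repository and Solr)
    // [solr|alfresco].secureComms=none   -> X509 enforcement disabled
    // [solr|alfresco].secureComms=secret -> X509 enforcement disabled; requests authenticate via the shared-secret header instead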

@@ -134,6 +134,8 @@
</bean>
<bean id="webScriptExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.WebScriptExceptionResolver">
</bean>
<bean id="queryParserExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver">
</bean>
<bean id="simpleMappingExceptionResolverParent" abstract="true" class="org.alfresco.rest.framework.core.exceptions.SimpleMappingExceptionResolver">
  <property name="exceptionMappings">
    <map>
@@ -182,6 +184,7 @@
  <property name="resolver" ref="simpleMappingExceptionResolver" />
  <property name="webScriptExceptionResolver" ref="webScriptExceptionResolver" />
  <property name="restMetricsReporter" ref="restMetricsReporter"/>
  <property name="queryParserExceptionResolver" ref="queryParserExceptionResolver" />
</bean>

<!-- Using annotation-config=false means AutowiredAnnotationBeanPostProcessor
@@ -1,34 +1,76 @@
<#assign null><span style="color:red">${msg("nodebrowser.null")?html}</span></#assign>
<#assign none><span style="color:red">${msg("nodebrowser.none")?html}</span></#assign>
<#assign collection>${msg("nodebrowser.collection")?html}</#assign>

<#assign maxDepth=1000 />
<#macro dateFormat date>${date?string("dd MMM yyyy HH:mm:ss 'GMT'Z '('zzz')'")}</#macro>
<#macro propValue p>
<#if p.value??>
<#if p.value?is_date>
<@dateFormat p.value />
<#elseif p.value?is_boolean>
${p.value?string}
<#elseif p.value?is_number>
${p.value?c}
<#elseif p.value?is_string>
${p.value?html}
<#elseif p.value?is_hash>
<#assign result = "{"/>
<#assign first = true />
<#list p.value?keys as key>
<#if first = false>
<#assign result = result + ", "/>
<#attempt>
<#if p.value??>
<#if p.value?is_date>
<@dateFormat p.value />
<#elseif p.value?is_boolean>
${p.value?string}
<#elseif p.value?is_number>
${p.value?c}
<#elseif p.value?is_string>
${p.value?html}
<#elseif p.value?is_hash || p.value?is_enumerable>
<@convertToJSON p.value />
</#if>
<#else>
${null}
</#if>
<#recover>
<span style="color:red">${.error}</span>
</#attempt>
</#macro>
<#macro convertToJSON v>
<#if v??>
<#if v?is_date>
<@dateFormat v />
<#elseif v?is_boolean>
${v?string}
<#elseif v?is_number>
${v?c}
<#elseif v?is_string>
"${v?string}"
<#elseif v?is_hash>
<#if v?keys?size gt maxDepth >
<#stop "Max depth of object achieved">
</#if>
<#assign result = result + "${key}=${p.value[key]?html}" />
<#assign first = false/>
</#list>
<#assign result = result + "}"/>
${result}
<@compress single_line=true>
{
<#assign first = true />
<#list v?keys as key>
<#if first = false>,</#if>
"${key}":
<#if v[key]??>
<@convertToJSON v[key] />
<#else>
${null}
</#if>
<#assign first = false/>
</#list>
}
</@compress>
<#elseif v?is_enumerable>
<#if v?size gt maxDepth>
<#stop "Max depth of object achieved" >
</#if>
<#assign first = true />
<@compress single_line=true>
[
<#list v as item>
<#if first = false>,</#if>
<@convertToJSON item />
<#assign first = false/>
</#list>
]
</@compress>
</#if>
<#else>
${null}
</#if>
<#else>
${null}
</#if>
</#macro>
<#macro contentUrl nodeRef prop>
${url.serviceContext}/api/node/${nodeRef?replace("://","/")}/content;${prop?url}
@@ -23,45 +23,46 @@
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.rest.framework.tests.core;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import org.alfresco.repo.forms.FormNotFoundException;
import org.alfresco.repo.node.integrity.IntegrityException;
import org.alfresco.rest.framework.core.exceptions.ApiException;
import org.alfresco.rest.framework.core.exceptions.ConstraintViolatedException;
import org.alfresco.rest.framework.core.exceptions.DeletedResourceException;
import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
import org.alfresco.rest.framework.core.exceptions.ErrorResponse;
import org.alfresco.repo.search.QueryParserException;
import org.alfresco.rest.framework.core.exceptions.InsufficientStorageException;
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
import org.alfresco.rest.framework.core.exceptions.NotFoundException;
import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
import org.alfresco.rest.framework.core.exceptions.RelationshipResourceNotFoundException;
import org.alfresco.rest.framework.core.exceptions.StaleEntityException;
import org.alfresco.rest.framework.core.exceptions.UnsupportedResourceOperationException;
import org.alfresco.rest.framework.resource.parameters.where.InvalidQueryException;
import org.alfresco.rest.framework.tools.ApiAssistant;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import javax.servlet.http.HttpServletResponse;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:test-rest-context.xml" })
public class ExceptionResolverTests
{
    @Autowired
    ApiAssistant assistant;

    @Test
    public void testWebscriptException()
    {
        ErrorResponse response = assistant.resolveException(new WebScriptException(null));
@@ -75,43 +76,43 @@ public class ExceptionResolverTests

    //04180006 Authentication failed for Web Script org/alfresco/api/ResourceWebScript.get
    @Test
    public void testMatchException()
    {
        ErrorResponse response = assistant.resolveException(new ApiException(null));
        assertNotNull(response);
        assertEquals(500, response.getStatusCode()); //default to INTERNAL_SERVER_ERROR

        response = assistant.resolveException(new InvalidArgumentException(null));
        assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST

        response = assistant.resolveException(new InvalidQueryException(null));
        assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST

        response = assistant.resolveException(new NotFoundException(null));
        assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND

        response = assistant.resolveException(new EntityNotFoundException(null));
        assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND

        response = assistant.resolveException(new RelationshipResourceNotFoundException(null, null));
        assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND

        response = assistant.resolveException(new PermissionDeniedException(null));
        assertEquals(403, response.getStatusCode()); //default to STATUS_FORBIDDEN

        response = assistant.resolveException(new UnsupportedResourceOperationException(null));
        assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED

        response = assistant.resolveException(new DeletedResourceException(null));
        assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED

        response = assistant.resolveException(new ConstraintViolatedException(null));
        assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT

        response = assistant.resolveException(new StaleEntityException(null));
        assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT

        //Try a random exception
        response = assistant.resolveException(new FormNotFoundException(null));
        assertEquals(500, response.getStatusCode()); //default to INTERNAL_SERVER_ERROR

@@ -120,6 +121,15 @@ public class ExceptionResolverTests

        response = assistant.resolveException(new IntegrityException(null));
        assertEquals(422, response.getStatusCode());
    }

    /** Check that the status code from SS is passed back to the caller. */
    @Test
    public void testQueryParserException()
    {
        ErrorResponse response = assistant.resolveException(new QueryParserException("Endpoint not found", 404));
        assertNotNull(response);
        assertEquals("Expected status code to be passed through from query parser.", 404, response.getStatusCode());
    }
}
@@ -33,10 +33,12 @@
</bean>
<bean id="webScriptExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.WebScriptExceptionResolver">
</bean>
<bean id="queryParserExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver" />
<bean id="apiAssistant" class="org.alfresco.rest.framework.tools.ApiAssistant">
  <property name="jsonHelper" ref="jsonHelper" />
  <property name="resolver" ref="simpleMappingExceptionResolver" />
  <property name="webScriptExceptionResolver" ref="webScriptExceptionResolver" />
  <property name="queryParserExceptionResolver" ref="queryParserExceptionResolver" />
</bean>
<bean id="simpleMappingExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.SimpleMappingExceptionResolver">
  <property name="exceptionMappings">
@@ -7,7 +7,7 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-community-repo</artifactId>
    <version>8.424-SNAPSHOT</version>
    <version>9.41</version>
  </parent>

  <dependencies>
@@ -245,7 +245,7 @@
  <dependency>
    <groupId>org.freemarker</groupId>
    <artifactId>freemarker</artifactId>
    <version>2.3.20-alfresco-patched-20200421</version>
    <version>2.3.20-alfresco-patched-20220413</version>
  </dependency>
  <dependency>
    <groupId>org.apache.xmlbeans</groupId>
@@ -383,7 +383,7 @@
  <dependency>
    <groupId>com.fasterxml.woodstox</groupId>
    <artifactId>woodstox-core</artifactId>
    <version>6.2.4</version>
    <version>6.2.6</version>
  </dependency>

  <!-- GData -->
@@ -40,6 +40,7 @@ public class IdsEntity
    private Long idThree;
    private Long idFour;
    private List<Long> ids;
    private boolean ordered;
    public Long getIdOne()
    {
        return idOne;
@@ -80,4 +81,12 @@ public class IdsEntity
    {
        this.ids = ids;
    }
    public boolean isOrdered()
    {
        return ordered;
    }
    public void setOrdered(boolean ordered)
    {
        this.ordered = ordered;
    }
}
@@ -32,6 +32,8 @@ import org.alfresco.sync.repo.Client;
import org.alfresco.sync.repo.Client.ClientType;
import org.alfresco.repo.activities.ActivityType;
import org.alfresco.repo.model.filefolder.HiddenAspect;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.cmr.activities.ActivityInfo;
import org.alfresco.service.cmr.activities.ActivityPoster;
@@ -228,7 +230,7 @@ public class ActivityPosterImpl implements CmisActivityPoster, InitializingBean
    {
        if(activitiesEnabled && !hiddenAspect.hasHiddenAspect(nodeRef))
        {
            SiteInfo siteInfo = siteService.getSite(nodeRef);
            SiteInfo siteInfo = getSiteAsSystem(nodeRef);
            String siteId = (siteInfo != null ? siteInfo.getShortName() : null);
            if(siteId != null && !siteId.equals(""))
            {
@@ -290,5 +292,16 @@ public class ActivityPosterImpl implements CmisActivityPoster, InitializingBean
            return null;
        }
    }

    private SiteInfo getSiteAsSystem(NodeRef nodeRef)
    {
        return AuthenticationUtil.runAsSystem(new RunAsWork<SiteInfo>()
        {
            @Override
            public SiteInfo doWork() throws Exception
            {
                return siteService.getSite(nodeRef);
            }
        });
    }
}
|
@@ -34,7 +34,10 @@ import org.alfresco.repo.action.ParameterDefinitionImpl;
import org.alfresco.repo.admin.SysAdminParams;
import org.alfresco.repo.jscript.ScriptAction;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.action.ParameterConstraint;
import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.NodeRef;

@@ -126,6 +129,10 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
if (nodeService.exists(actionedUponNodeRef))
{
NodeRef scriptRef = (NodeRef)action.getParameterValue(PARAM_SCRIPTREF);
if(!isValidScriptRef(action))
{
throw new IllegalStateException("Invalid script ref path: " + scriptRef);
}
NodeRef spaceRef = this.serviceRegistry.getRuleService().getOwningNodeRef(action);
if (spaceRef == null)
{

@@ -222,4 +229,19 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase

return companyHomeRef;
}

private boolean isValidScriptRef(Action action)
{
NodeRef scriptRef = (NodeRef) action.getParameterValue(PARAM_SCRIPTREF);
ActionService actionService = this.serviceRegistry.getActionService();
ActionDefinition actDef = actionService.getActionDefinition(action.getActionDefinitionName());
ParameterDefinition parameterDef = actDef.getParameterDefintion(PARAM_SCRIPTREF);
String paramConstraintName = parameterDef.getParameterConstraintName();
if (paramConstraintName != null)
{
ParameterConstraint paramConstraint = actionService.getParameterConstraint(paramConstraintName);
return paramConstraint.isValidValue(scriptRef.toString());
}
return true;
}
}
@@ -30,12 +30,14 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.alfresco.repo.bulkimport.ImportableItem.ContentAndMetadata;
import org.alfresco.repo.bulkimport.MetadataLoader;
import org.alfresco.repo.bulkimport.impl.FileUtils;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.namespace.NamespaceService;

@@ -60,6 +62,9 @@ abstract class AbstractMapBasedMetadataLoader implements MetadataLoader

private final static String DEFAULT_MULTI_VALUED_SEPARATOR = ",";

private final List<QName> TYPES_TO_HANDLE_EMPTY_VALUE = List.of(DataTypeDefinition.DATE, DataTypeDefinition.DATETIME,
DataTypeDefinition.FLOAT, DataTypeDefinition.DOUBLE, DataTypeDefinition.INT, DataTypeDefinition.LONG);

protected final NamespaceService namespaceService;
protected final DictionaryService dictionaryService;
protected final String multiValuedSeparator;

@@ -164,7 +169,7 @@ abstract class AbstractMapBasedMetadataLoader implements MetadataLoader
else
{
// Single value property
metadata.addProperty(name, metadataProperties.get(key));
metadata.addProperty(name, handleValue(propertyDefinition, metadataProperties.get(key)));
}
}
else

@@ -187,4 +192,17 @@ abstract class AbstractMapBasedMetadataLoader implements MetadataLoader
}
}

private Serializable handleValue(PropertyDefinition pd, Serializable value)
{
if (pd != null && TYPES_TO_HANDLE_EMPTY_VALUE.contains(pd.getDataType().getName()))
{
if (value != null && value.toString().trim().length() == 0)
{
value = null;
}
}

return value;
}

}
@@ -1483,7 +1483,17 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO

// Update ACLs for moved tree
Long newParentAclId = newParentNode.getAclId();
accessControlListDAO.updateInheritance(newChildNodeId, oldParentAclId, newParentAclId);

// Verify if parent has aspect applied and ACL's are pending
if (hasNodeAspect(oldParentNodeId, ContentModel.ASPECT_PENDING_FIX_ACL))
{
Long oldParentSharedAclId = (Long) this.getNodeProperty(oldParentNodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);
accessControlListDAO.updateInheritance(newChildNodeId, oldParentSharedAclId, newParentAclId);
}
else
{
accessControlListDAO.updateInheritance(newChildNodeId, oldParentAclId, newParentAclId);
}
}

// Done

@@ -2746,6 +2756,22 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
selectNodesWithAspects(qnameIds, minNodeId, maxNodeId, resultsCallback);
}

@Override
public void getNodesWithAspects(
Set<QName> aspectQNames,
Long minNodeId, Long maxNodeId, boolean ordered,
NodeRefQueryCallback resultsCallback)
{
Set<Long> qnameIdsSet = qnameDAO.convertQNamesToIds(aspectQNames, false);
if (qnameIdsSet.size() == 0)
{
// No point running a query
return;
}
List<Long> qnameIds = new ArrayList<Long>(qnameIdsSet);
selectNodesWithAspects(qnameIds, minNodeId, maxNodeId, ordered, resultsCallback);
}

/**
* @return Returns a writable copy of the cached aspects set
*/

@@ -4917,6 +4943,10 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
List<Long> qnameIds,
Long minNodeId, Long maxNodeId,
NodeRefQueryCallback resultsCallback);
protected abstract void selectNodesWithAspects(
List<Long> qnameIds,
Long minNodeId, Long maxNodeId, boolean ordered,
NodeRefQueryCallback resultsCallback);
protected abstract Long insertNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId, int assocIndex);
protected abstract int updateNodeAssoc(Long id, int assocIndex);
protected abstract int deleteNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId);
@@ -405,6 +405,20 @@ public interface NodeDAO extends NodeBulkLoader
Long minNodeId, Long maxNodeId,
NodeRefQueryCallback resultsCallback);

/**
* Get nodes with aspects between the given ranges, ordering the results optionally
*
* @param aspectQNames the aspects that must be on the nodes
* @param minNodeId the minimum node ID (inclusive)
* @param maxNodeId the maximum node ID (exclusive)
* @param ordered if the results are to be ordered by nodeID
* @param resultsCallback callback to process results
*/
public void getNodesWithAspects(
Set<QName> aspectQNames,
Long minNodeId, Long maxNodeId, boolean ordered,
NodeRefQueryCallback resultsCallback);

/*
* Node Assocs
*/
@@ -764,6 +764,31 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
template.select(SELECT_NODES_WITH_ASPECT_IDS, parameters, resultHandler);
}

@Override
protected void selectNodesWithAspects(
List<Long> qnameIds,
Long minNodeId, Long maxNodeId, boolean ordered,
final NodeRefQueryCallback resultsCallback)
{
@SuppressWarnings("rawtypes")
ResultHandler resultHandler = new ResultHandler()
{
public void handleResult(ResultContext context)
{
NodeEntity entity = (NodeEntity) context.getResultObject();
Pair<Long, NodeRef> nodePair = new Pair<Long, NodeRef>(entity.getId(), entity.getNodeRef());
resultsCallback.handle(nodePair);
}
};

IdsEntity parameters = new IdsEntity();
parameters.setIdOne(minNodeId);
parameters.setIdTwo(maxNodeId);
parameters.setIds(qnameIds);
parameters.setOrdered(ordered);
template.select(SELECT_NODES_WITH_ASPECT_IDS, parameters, resultHandler);
}

@Override
protected Long insertNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId, int assocIndex)
{
@@ -337,6 +337,13 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
setFixedAcls(getNodeIdNotNull(parent), inheritFrom, null, sharedAclToReplace, changes, false, asyncCall, true);
return changes;
}

public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall, boolean forceSharedACL)
{
List<AclChange> changes = new ArrayList<AclChange>();
setFixedAcls(getNodeIdNotNull(parent), inheritFrom, null, sharedAclToReplace, changes, false, asyncCall, true, forceSharedACL);
return changes;
}

public void updateChangedAcls(NodeRef startingPoint, List<AclChange> changes)
{

@@ -362,6 +369,29 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, false, true);
}

/**
* Support to set a shared ACL on a node and all of its children
*
* @param nodeId
*            the parent node
* @param inheritFrom
*            the parent node's ACL
* @param mergeFrom
*            the shared ACL, if already known. If <code>null</code>, will be retrieved / created lazily
* @param changes
*            the list in which to record changes
* @param set
*            set the shared ACL on the parent ?
* @param asyncCall
*            function may require asynchronous call depending the execution time; if time exceeds configured <code>fixedAclMaxTransactionTime</code> value,
*            recursion is stopped using propagateOnChildren parameter(set on false) and those nodes for which the method execution was not finished
*            in the classical way, will have ASPECT_PENDING_FIX_ACL, which will be used in {@link FixedAclUpdater} for later processing
*/
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
{
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, false, true, false);
}

/**
* Support to set a shared ACL on a node and all of its children
*

@@ -379,8 +409,10 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
* function may require asynchronous call depending the execution time; if time exceeds configured <code>fixedAclMaxTransactionTime</code> value,
* recursion is stopped using propagateOnChildren parameter(set on false) and those nodes for which the method execution was not finished
* in the classical way, will have ASPECT_PENDING_FIX_ACL, which will be used in {@link FixedAclUpdater} for later processing
* @param forceSharedACL
* When a child node has an unexpected ACL, force it to assume the new shared ACL instead of throwing a concurrency exception.
*/
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren, boolean forceSharedACL)
{
if (log.isDebugEnabled())
{

@@ -431,14 +463,14 @@ public class ADMAccessControlListDAO implements AccessControlListDAO

if (acl == null)
{
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren);
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren, forceSharedACL);
}
else
{
// Still has old shared ACL or already replaced
if(acl.equals(sharedAclToReplace) || acl.equals(mergeFrom) || acl.equals(currentAcl))
{
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren);
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren, forceSharedACL);
}
else
{

@@ -457,7 +489,20 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
}
else if (dbAcl.getAclType() == ACLType.SHARED)
{
throw new ConcurrencyFailureException("setFixedAcls: unexpected shared acl: "+dbAcl);
if (forceSharedACL)
{
log.warn("Forcing shared ACL on node: " + child.getId() + " ( "
+ nodeDAO.getNodePair(child.getId()).getSecond() + ") - " + dbAcl);
sharedAclToReplace = acl;
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace,
changes, false, asyncCall, propagateOnChildren, forceSharedACL);
}
else
{
throw new ConcurrencyFailureException(
"setFixedAcls: unexpected shared acl: " + dbAcl + " on node " + child.getId() + " ( "
+ nodeDAO.getNodePair(child.getId()).getSecond() + ")");
}
}
}
}

@@ -506,7 +551,7 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
*
*/
private boolean setFixAclPending(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace,
List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren, boolean forceSharedACL)
{
// check transaction time
long transactionStartTime = AlfrescoTransactionSupport.getTransactionStartTime();

@@ -514,7 +559,7 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
if (transactionTime < fixedAclMaxTransactionTime)
{
// make regular method call if time is under max transaction configured time
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, asyncCall, propagateOnChildren);
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, asyncCall, propagateOnChildren, forceSharedACL);
return true;
}
@@ -91,6 +91,11 @@ public interface AccessControlListDAO
*/
public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall);

/**
* Set the inheritance on a given node and it's children. If an unexpected ACL occurs in a child, it can be overriden by setting forceSharedACL
*/
public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall, boolean forceSharedACL);

public Long getIndirectAcl(NodeRef nodeRef);

public Long getInheritedAcl(NodeRef nodeRef);
@@ -38,6 +38,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.batch.BatchProcessWorkProvider;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.node.NodeDAO.NodeRefQueryCallback;
import org.alfresco.repo.lock.JobLockService;

@@ -50,6 +51,7 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.security.permissions.PermissionServicePolicies;
import org.alfresco.repo.security.permissions.PermissionServicePolicies.OnInheritPermissionsDisabled;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.service.cmr.repository.NodeRef;

@@ -64,6 +66,8 @@ import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.ConcurrencyFailureException;

/**
* Finds nodes with ASPECT_PENDING_FIX_ACL aspect and sets fixed ACLs for them

@@ -91,6 +95,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli

private int maxItemBatchSize = 100;
private int numThreads = 4;
private boolean forceSharedACL = false;

private ClassPolicyDelegate<OnInheritPermissionsDisabled> onInheritPermissionsDisabledDelegate;
private PolicyComponent policyComponent;

@@ -132,6 +137,11 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
this.maxItemBatchSize = maxItemBatchSize;
}

public void setForceSharedACL(boolean forceSharedACL)
{
this.forceSharedACL = forceSharedACL;
}

public void setLockTimeToLive(long lockTimeToLive)
{
this.lockTimeToLive = lockTimeToLive;

@@ -182,7 +192,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
public List<NodeRef> execute() throws Throwable
{
getNodesCallback.init();
nodeDAO.getNodesWithAspects(aspects, getNodesCallback.getMinNodeId(), null, getNodesCallback);
nodeDAO.getNodesWithAspects(aspects, getNodesCallback.getMinNodeId(), null, true, getNodesCallback);
getNodesCallback.done();

return getNodesCallback.getNodes();

@@ -253,7 +263,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
{
}

public void process(final NodeRef nodeRef) throws Throwable
public void process(final NodeRef nodeRef)
{
RunAsWork<Void> findAndUpdateAclRunAsWork = new RunAsWork<Void>()
{

@@ -265,34 +275,44 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
log.debug(String.format("Processing node %s", nodeRef));
}

final Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();

// MNT-22009 - If node was deleted and in archive store, remove the aspect and properties and do not
// process
if (nodeRef.getStoreRef().equals(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE))
try
{
final Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();

// MNT-22009 - If node was deleted and in archive store, remove the aspect and properties and do
// not
// process
if (nodeRef.getStoreRef().equals(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE))
{
accessControlListDAO.removePendingAclAspect(nodeId);
return null;
}

// retrieve acl properties from node
Long inheritFrom = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_INHERIT_FROM_ACL);
Long sharedAclToReplace = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);

// set inheritance using retrieved prop
accessControlListDAO.setInheritanceForChildren(nodeRef, inheritFrom, sharedAclToReplace, true,
forceSharedACL);

// Remove aspect
accessControlListDAO.removePendingAclAspect(nodeId);
return null;

if (!policyIgnoreUtil.ignorePolicy(nodeRef))
{
boolean transformedToAsyncOperation = toBoolean((Boolean) AlfrescoTransactionSupport
.getResource(FixedAclUpdater.FIXED_ACL_ASYNC_REQUIRED_KEY));

OnInheritPermissionsDisabled onInheritPermissionsDisabledPolicy = onInheritPermissionsDisabledDelegate
.get(ContentModel.TYPE_BASE);
onInheritPermissionsDisabledPolicy.onInheritPermissionsDisabled(nodeRef, transformedToAsyncOperation);
}
}

// retrieve acl properties from node
Long inheritFrom = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_INHERIT_FROM_ACL);
Long sharedAclToReplace = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);

// set inheritance using retrieved prop
accessControlListDAO.setInheritanceForChildren(nodeRef, inheritFrom, sharedAclToReplace, true);

// Remove aspect
accessControlListDAO.removePendingAclAspect(nodeId);

if (!policyIgnoreUtil.ignorePolicy(nodeRef))
catch (Exception e)
{
boolean transformedToAsyncOperation = toBoolean(
(Boolean) AlfrescoTransactionSupport.getResource(FixedAclUpdater.FIXED_ACL_ASYNC_REQUIRED_KEY));

OnInheritPermissionsDisabled onInheritPermissionsDisabledPolicy = onInheritPermissionsDisabledDelegate
.get(ContentModel.TYPE_BASE);
onInheritPermissionsDisabledPolicy.onInheritPermissionsDisabled(nodeRef, transformedToAsyncOperation);
log.error("Job could not process pending ACL node " + nodeRef + ": " + e);
e.printStackTrace();
}

if (log.isDebugEnabled())

@@ -308,6 +328,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
AuthenticationUtil.runAs(findAndUpdateAclRunAsWork, AuthenticationUtil.getSystemUserName());
}
};

private class GetNodesWithAspectCallback implements NodeRefQueryCallback
{
@@ -69,19 +69,19 @@ public class DeleteNotExistsExecutor implements StatementExecutor
public static final String PROPERTY_READ_ONLY = "system.delete_not_exists.read_only";
public static final String PROPERTY_TIMEOUT_SECONDS = "system.delete_not_exists.timeout_seconds";

private Connection connection;
protected Connection connection;
private String sql;
private int line;
private File scriptFile;
private Properties globalProperties;

private boolean readOnly;
private int deleteBatchSize;
private int batchSize;
protected boolean readOnly;
protected int deleteBatchSize;
protected int batchSize;
private long timeoutSec;

private long deletedCount;
private Date startTime;
protected long deletedCount;
protected Date startTime;

public DeleteNotExistsExecutor(Connection connection, String sql, int line, File scriptFile, Properties globalProperties)
{

@@ -164,7 +164,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
{
// The approach is to fetch ordered row ids from all referencer/secondary (e.g.
// alf_audit_app, alf_audit_entry, alf_prop_unique_ctx) tables and

@@ -190,6 +190,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
try
{
connection.setAutoCommit(false);

primaryPrepStmt = connection.prepareStatement(createPreparedSelectStatement(primaryTableName, primaryColumnName, primaryWhereClause));
primaryPrepStmt.setFetchSize(batchSize);
primaryPrepStmt.setLong(1, primaryId);

@@ -264,7 +265,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private boolean isTimeoutExceeded()
protected boolean isTimeoutExceeded()
{
if (timeoutSec <= 0)
{

@@ -275,7 +276,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return (now.getTime() > startTime.getTime() + (timeoutSec * 1000));
}

private Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName,
protected Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName,
String primaryColumnName, Pair<String, String>[] tableColumn) throws SQLException
{
int rowsProcessed = 0;

@@ -336,7 +337,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return primaryId;
}

private void deleteFromPrimaryTable(PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName) throws SQLException
protected void deleteFromPrimaryTable(PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName) throws SQLException
{
int deletedBatchCount = deleteIds.size();
if (!readOnly && !deleteIds.isEmpty())

@@ -425,7 +426,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return batchUpperLimit;
}

private boolean isLess(Long primaryId, Long[] secondaryIds)
protected boolean isLess(Long primaryId, Long[] secondaryIds)
{
for (Long secondaryId : secondaryIds)
{

@@ -447,8 +448,8 @@ public class DeleteNotExistsExecutor implements StatementExecutor

return quotedString.replace("\"", "");
}

private String createPreparedSelectStatement(String tableName, String columnName, String whereClause)

protected String createPreparedSelectStatement(String tableName, String columnName, String whereClause)
{
StringBuilder sqlBuilder = new StringBuilder("SELECT " + columnName + " FROM " + tableName + " WHERE ");

@@ -461,7 +462,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return sqlBuilder.toString();
}

private String createPreparedDeleteStatement(String tableName, String idColumnName, int deleteBatchSize, String whereClause)
protected String createPreparedDeleteStatement(String tableName, String idColumnName, int deleteBatchSize, String whereClause)
{
StringBuilder stmtBuilder = new StringBuilder("DELETE FROM " + tableName + " WHERE ");

@@ -515,7 +516,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private Long getColumnValueById(ResultSet resultSet, String columnId) throws SQLException
protected Long getColumnValueById(ResultSet resultSet, String columnId) throws SQLException
{
Long columnValue = null;
if (resultSet != null && resultSet.next())

@@ -526,7 +527,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return columnValue;
}

private ResultSet[] getSecondaryResultSets(PreparedStatement[] preparedStatements) throws SQLException
protected ResultSet[] getSecondaryResultSets(PreparedStatement[] preparedStatements) throws SQLException
{
ResultSet[] secondaryResultSets = new ResultSet[preparedStatements.length];
for (int i = 1; i < preparedStatements.length; i++)

@@ -540,7 +541,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
return secondaryResultSets;
}

private Long[] getSecondaryIds(ResultSet[] secondaryResultSets, Pair<String, String>[] tableColumn) throws SQLException
protected Long[] getSecondaryIds(ResultSet[] secondaryResultSets, Pair<String, String>[] tableColumn) throws SQLException
{
Long[] secondaryIds = new Long[tableColumn.length];

@@ -571,7 +572,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private void closeQuietly(Statement statement)
protected void closeQuietly(Statement statement)
{
if (statement != null)
{

@@ -586,7 +587,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private void closeQuietly(Statement[] statements)
protected void closeQuietly(Statement[] statements)
{
if (statements != null)
{

@@ -597,7 +598,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private void closeQuietly(ResultSet resultSet)
protected void closeQuietly(ResultSet resultSet)
{
if (resultSet != null)
{

@@ -612,7 +613,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
}
}

private void closeQuietly(ResultSet[] resultSets)
protected void closeQuietly(ResultSet[] resultSets)
{
if (resultSets != null)
{
@@ -0,0 +1,278 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.domain.schema.script;

import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import javax.sql.DataSource;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

/**
 * Extends <code>{@link DeleteNotExistsExecutor}</code> to cope with MySQL
 * specific fetch size limitation and restrictions.
 */
public class MySQLDeleteNotExistsExecutor extends DeleteNotExistsExecutor
{
private static final Log logger = LogFactory.getLog(MySQLDeleteNotExistsExecutor.class);

private final DataSource dataSource;

public MySQLDeleteNotExistsExecutor(Connection connection, String sql, int line, File scriptFile, Properties globalProperties, DataSource dataSource)
{
super(connection, sql, line, scriptFile, globalProperties);
this.dataSource = dataSource;
}

@Override
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
{
// The approach is to fetch ordered row ids from all referencer/secondary (e.g.
// alf_audit_app, alf_audit_entry, alf_prop_unique_ctx) tables and
// referenced/primary table (e.g. alf_prop_root) concurrently, so that it is
// possible skip over id gaps efficiently while at the same time being able to
// work out which ids are obsolete and delete them in batches.

// The algorithm can be further improved by iterating over the rows in descending order.
// This is due to the fact that older data should be more stable in time.

String primaryTableName = tableColumn[0].getFirst();
String primaryColumnName = tableColumn[0].getSecond();
String primaryWhereClause = optionalWhereClauses[0];

Long primaryId = 0L;

PreparedStatement primaryPrepStmt = null;
PreparedStatement[] secondaryPrepStmts = null;
PreparedStatement deletePrepStmt = null;
Set<Long> deleteIds = new HashSet<>();

deletedCount = 0L;
startTime = new Date();

long defaultOffset = 0L;
try
{
connection.setAutoCommit(false);

primaryPrepStmt = connection.prepareStatement(createLimitPreparedSelectStatement(primaryTableName, primaryColumnName, primaryWhereClause));
primaryPrepStmt.setLong(1, primaryId);
primaryPrepStmt.setLong(2, tableUpperLimits[0]);
primaryPrepStmt.setInt(3, batchSize);
primaryPrepStmt.setLong(4, defaultOffset);

boolean hasResults = primaryPrepStmt.execute();

if (hasResults)
{
secondaryPrepStmts = new PreparedStatement[tableColumn.length];
for (int i = 1; i < tableColumn.length; i++)
{
PreparedStatement secStmt = connection.prepareStatement(createLimitPreparedSelectStatement(tableColumn[i].getFirst(), tableColumn[i].getSecond(), optionalWhereClauses[i]));
secStmt.setLong(1, primaryId);
secStmt.setLong(2, tableUpperLimits[i]);
secStmt.setInt(3, batchSize);
secStmt.setLong(4, defaultOffset);

secondaryPrepStmts[i] = secStmt;
}

deletePrepStmt = connection.prepareStatement(createPreparedDeleteStatement(primaryTableName, primaryColumnName, deleteBatchSize, primaryWhereClause));

// Timeout is only checked at each bach start.
// It can be further refined by being verified at each primary row processing.
while (hasResults && !isTimeoutExceeded())
{
// Process batch
primaryId = processPrimaryTableResultSet(primaryPrepStmt, secondaryPrepStmts, deletePrepStmt, deleteIds, primaryTableName, primaryColumnName, tableColumn);
connection.commit();

if (primaryId == null)
{
break;
}

// Prepare for next batch
primaryPrepStmt.setLong(1, primaryId);
primaryPrepStmt.setLong(2, tableUpperLimits[0]);
primaryPrepStmt.setInt(3, batchSize);
primaryPrepStmt.setLong(4, defaultOffset);

for (int i = 1; i < tableColumn.length; i++)
{
PreparedStatement secStmt = secondaryPrepStmts[i];
secStmt.setLong(1, primaryId);
secStmt.setLong(2, tableUpperLimits[i]);
secStmt.setInt(3, batchSize);
secStmt.setLong(4, defaultOffset);
}

hasResults = primaryPrepStmt.execute();
}
}

// Check if we have any more ids to delete
if (!deleteIds.isEmpty())
{
deleteFromPrimaryTable(deletePrepStmt, deleteIds, primaryTableName);
connection.commit();
}

if (logger.isDebugEnabled())
{
String msg = ((readOnly) ? "Script would have" : "Script") + " deleted a total of " + deletedCount + " items from table " + primaryTableName + ".";
logger.debug(msg);
}
}
finally
{
closeQuietly(deletePrepStmt);
closeQuietly(secondaryPrepStmts);
closeQuietly(primaryPrepStmt);

connection.setAutoCommit(true);
}
}

protected Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds,
String primaryTableName, String primaryColumnName, Pair<String, String>[] tableColumn) throws SQLException
{
int rowsProcessed = 0;
Long primaryId = null;
ResultSet[] secondaryResultSets = null;
try (ResultSet resultSet = primaryPrepStmt.getResultSet())
{
secondaryResultSets = getSecondaryResultSets(secondaryPrepStmts);
Long[] secondaryIds = getSecondaryIds(secondaryResultSets, tableColumn);

// Create and populate secondary tables offsets
Long[] secondaryOffsets = new Long[tableColumn.length];
for (int i = 1; i < tableColumn.length; i++)
{
secondaryOffsets[i] = 0L;
}

while (resultSet.next())
{
++rowsProcessed;
primaryId = resultSet.getLong(primaryColumnName);

while (isLess(primaryId, secondaryIds))
{
deleteIds.add(primaryId);

if (deleteIds.size() == deleteBatchSize)
{
deleteFromPrimaryTable(deletePrepStmt, deleteIds, primaryTableName);
connection.commit();
}

if (!resultSet.next())
{
break;
}

++rowsProcessed;
primaryId = resultSet.getLong(primaryColumnName);
}

if (logger.isTraceEnabled())
{
logger.trace("RowsProcessed " + rowsProcessed + " from primary table " + primaryTableName);
}

updateSecondaryIds(primaryId, secondaryIds, secondaryPrepStmts, secondaryOffsets, secondaryResultSets, tableColumn);
}
}
finally
{
closeQuietly(secondaryResultSets);
}

return primaryId;
}

private void updateSecondaryIds(Long primaryId, Long[] secondaryIds, PreparedStatement[] secondaryPrepStmts, Long[] secondaryOffsets, ResultSet[] secondaryResultSets,
Pair<String, String>[] tableColumn) throws SQLException
{
for (int i = 1; i < tableColumn.length; i++)
{
Long secondaryId = secondaryIds[i];
while (secondaryId != null && primaryId >= secondaryId)
{
ResultSet resultSet = secondaryResultSets[i];
String columnId = tableColumn[i].getSecond();

secondaryId = getColumnValueById(resultSet, columnId);

// Check if we reach the end of the first page
if (secondaryId == null)
{
// Close the previous result set
closeQuietly(resultSet);

// Set to use the next page
long offset = secondaryOffsets[i] + batchSize;
secondaryOffsets[i] = offset;

PreparedStatement secStmt = secondaryPrepStmts[i];
secStmt.setLong(4, offset);

// Check if any results were found
boolean secHasResults = secStmt.execute();
secondaryResultSets[i] = secHasResults ? secStmt.getResultSet() : null;

// Try again to get the next secondary id
secondaryId = getColumnValueById(secondaryResultSets[i], columnId);
}

secondaryIds[i] = secondaryId;
}
}
}

private String createLimitPreparedSelectStatement(String tableName, String columnName, String whereClause)
{
StringBuilder sqlBuilder = new StringBuilder("SELECT " + columnName + " FROM " + tableName + " WHERE ");

if (whereClause != null && !whereClause.isEmpty())
{
sqlBuilder.append(whereClause + " AND ");
}

sqlBuilder.append(columnName + " > ? AND " + columnName + " <= ? ORDER BY " + columnName + " ASC LIMIT ? OFFSET ?");
return sqlBuilder.toString();
}
}
@@ -350,7 +350,7 @@ public class ScriptExecutorImpl implements ScriptExecutor
}
else if (sql.startsWith("--DELETE_NOT_EXISTS"))
{
DeleteNotExistsExecutor deleteNotExists = new DeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties);
DeleteNotExistsExecutor deleteNotExists = createDeleteNotExistsExecutor(dialect, connection, sql, line, scriptFile);
deleteNotExists.execute();

// Reset

@@ -537,7 +537,17 @@ public class ScriptExecutorImpl implements ScriptExecutor
try { scriptInputStream.close(); } catch (Throwable e) {}
}
}

private DeleteNotExistsExecutor createDeleteNotExistsExecutor(Dialect dialect, Connection connection, String sql, int line, File scriptFile)
{
if (dialect instanceof MySQLInnoDBDialect)
{
return new MySQLDeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties, dataSource);
}

return new DeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties);
}

/**
* Execute the given SQL statement, absorbing exceptions that we expect during
* schema creation or upgrade.
@@ -54,7 +54,6 @@ import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;

@@ -90,11 +89,11 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
protected DictionaryService dictionaryService;
private DescriptorService descriptorService;
private EventFilterRegistry eventFilterRegistry;
private Event2MessageProducer event2MessageProducer;
private TransactionService transactionService;
private PersonService personService;
protected NodeResourceHelper nodeResourceHelper;

private EventGeneratorQueue eventGeneratorQueue;
private NodeTypeFilter nodeTypeFilter;
private ChildAssociationTypeFilter childAssociationTypeFilter;
private EventUserFilter userFilter;

@@ -109,10 +108,10 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
PropertyCheck.mandatory(this, "descriptorService", descriptorService);
PropertyCheck.mandatory(this, "eventFilterRegistry", eventFilterRegistry);
PropertyCheck.mandatory(this, "event2MessageProducer", event2MessageProducer);
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "personService", personService);
PropertyCheck.mandatory(this, "nodeResourceHelper", nodeResourceHelper);
PropertyCheck.mandatory(this, "eventGeneratorQueue", eventGeneratorQueue);

this.nodeTypeFilter = eventFilterRegistry.getNodeTypeFilter();
this.childAssociationTypeFilter = eventFilterRegistry.getChildAssociationTypeFilter();

@@ -177,12 +176,6 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
this.eventFilterRegistry = eventFilterRegistry;
}

@SuppressWarnings("unused")
public void setEvent2MessageProducer(Event2MessageProducer event2MessageProducer)
{
this.event2MessageProducer = event2MessageProducer;
}

public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;

@@ -198,6 +191,11 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
this.nodeResourceHelper = nodeResourceHelper;
}

public void setEventGeneratorQueue(EventGeneratorQueue eventGeneratorQueue)
{
this.eventGeneratorQueue = eventGeneratorQueue;
}

@Override
public void onCreateNode(ChildAssociationRef childAssocRef)
{

@@ -428,20 +426,26 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin

protected void sendEvent(NodeRef nodeRef, EventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(nodeRef, consolidator, eventInfo));
}

private RepoEvent<?> createEvent(NodeRef nodeRef, EventConsolidator consolidator, EventInfo eventInfo)
{
String user = eventInfo.getPrincipal();

if (consolidator.isTemporaryNode())
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Ignoring temporary node: " + nodeRef);
}
return;
return null;
}

final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest node info into account
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));

final RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);

final QName nodeType = consolidator.getNodeType();
if (isFiltered(nodeType, user))

@@ -452,7 +456,7 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((nodeType == null) ? "Unknown' " : nodeType.toPrefixString())
+ "' created by: " + user);
}
return;
return null;
}

if (event.getType().equals(EventType.NODE_UPDATED.getType()) && consolidator.isResourceBeforeAllFieldsNull())

@@ -461,27 +465,34 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
{
LOGGER.trace("Ignoring node updated event as no fields have been updated: " + nodeRef);
}
return;
return null;
}

logAndSendEvent(event, consolidator.getEventTypes());
logEvent(event, consolidator.getEventTypes());
return event;
}

protected void sendEvent(ChildAssociationRef childAssociationRef, ChildAssociationEventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(eventInfo, childAssociationRef, consolidator));
}

private RepoEvent<?> createEvent(EventInfo eventInfo, ChildAssociationRef childAssociationRef, ChildAssociationEventConsolidator consolidator)
{
String user = eventInfo.getPrincipal();
if (consolidator.isTemporaryChildAssociation())
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Ignoring temporary child association: " + childAssociationRef);
}
return;
return null;
}

final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest association info into account
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));
final RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);

final QName childAssocType = consolidator.getChildAssocType();
if (isFilteredChildAssociation(childAssocType, user))

@@ -492,7 +503,7 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((childAssocType == null) ? "Unknown' " : childAssocType.toPrefixString())
+ "' created by: " + user);
}
return;
return null;
} else if (childAssociationRef.isPrimary())
{
if (LOGGER.isTraceEnabled())

@@ -501,13 +512,20 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((childAssocType == null) ? "Unknown' " : childAssocType.toPrefixString())
+ "' created by: " + user);
}
return;
return null;
}

logAndSendEvent(event, consolidator.getEventTypes());
logEvent(event, consolidator.getEventTypes());
return event;
}

protected void sendEvent(AssociationRef peerAssociationRef, PeerAssociationEventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(eventInfo, peerAssociationRef, consolidator));
}

private RepoEvent<?> createEvent(EventInfo eventInfo, AssociationRef peerAssociationRef, PeerAssociationEventConsolidator consolidator)
{
if (consolidator.isTemporaryPeerAssociation())
{

@@ -515,30 +533,21 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
{
LOGGER.trace("Ignoring temporary peer association: " + peerAssociationRef);
}
return;
return null;
}

final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest association info into account
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));

logAndSendEvent(event, consolidator.getEventTypes());
RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);
logEvent(event, consolidator.getEventTypes());
return event;
}

protected void logAndSendEvent(RepoEvent<?> event, Deque<EventType> listOfEvents)
private void logEvent(RepoEvent<?> event, Deque<EventType> listOfEvents)
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("List of Events:" + listOfEvents);
LOGGER.trace("Sending event:" + event);
}
// Need to execute this in another read txn because Camel expects it
transactionService.getRetryingTransactionHelper().doInTransaction((RetryingTransactionCallback<Void>) () -> {
event2MessageProducer.send(event);

return null;
}, true, false);
}
}
@@ -0,0 +1,179 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.event2;
|
||||
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.alfresco.repo.event.v1.model.RepoEvent;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;

/*
 * This queue creates RepoEvents asynchronously, offloading the work to a thread pool
 * while preserving the order of the events.
 */
public class EventGeneratorQueue implements InitializingBean
{
    protected static final Log LOGGER = LogFactory.getLog(EventGeneratorQueue.class);

    protected Executor enqueueThreadPoolExecutor;
    protected Executor dequeueThreadPoolExecutor;
    protected Event2MessageProducer event2MessageProducer;
    protected BlockingQueue<EventInMaking> queue = new LinkedBlockingQueue<>();
    protected Runnable listener = createListener();

    @Override
    public void afterPropertiesSet() throws Exception
    {
        PropertyCheck.mandatory(this, "enqueueThreadPoolExecutor", enqueueThreadPoolExecutor);
        PropertyCheck.mandatory(this, "dequeueThreadPoolExecutor", dequeueThreadPoolExecutor);
        PropertyCheck.mandatory(this, "event2MessageProducer", event2MessageProducer);
    }

    public void setEvent2MessageProducer(Event2MessageProducer event2MessageProducer)
    {
        this.event2MessageProducer = event2MessageProducer;
    }

    public void setEnqueueThreadPoolExecutor(Executor enqueueThreadPoolExecutor)
    {
        this.enqueueThreadPoolExecutor = enqueueThreadPoolExecutor;
    }

    public void setDequeueThreadPoolExecutor(Executor dequeueThreadPoolExecutor)
    {
        this.dequeueThreadPoolExecutor = dequeueThreadPoolExecutor;
        dequeueThreadPoolExecutor.execute(listener);
    }

    /**
     * Enqueues a callback function that creates an event.
     * @param maker Callback function that creates an event.
     */
    public void accept(Callable<RepoEvent<?>> maker)
    {
        EventInMaking eventInMaking = new EventInMaking(maker);
        queue.offer(eventInMaking);
        enqueueThreadPoolExecutor.execute(() -> {
            try
            {
                eventInMaking.make();
            }
            catch (Exception e)
            {
                LOGGER.error("Unexpected error while enqueuing maker function for repository event", e);
            }
        });
    }

    /**
     * Creates the listener task in charge of dequeuing and sending events once they are ready to be sent.
     * @return The task in charge of dequeuing and sending events ready to be sent.
     */
    private Runnable createListener()
    {
        return new Runnable()
        {
            @Override
            public void run()
            {
                try
                {
                    while (!Thread.interrupted())
                    {
                        try
                        {
                            EventInMaking eventInMaking = queue.take();
                            RepoEvent<?> event = eventInMaking.getEventWhenReady();
                            if (event != null)
                            {
                                event2MessageProducer.send(event);
                            }
                        }
                        catch (Exception e)
                        {
                            LOGGER.error("Unexpected error while dequeuing and sending repository event", e);
                        }
                    }
                }
                finally
                {
                    LOGGER.warn("Unexpected: rescheduling the listener thread.");
                    dequeueThreadPoolExecutor.execute(listener);
                }
            }
        };
    }

    /*
     * Simple class that makes events and allows them to be retrieved when ready.
     */
    private static class EventInMaking
    {
        private Callable<RepoEvent<?>> maker;
        private volatile RepoEvent<?> event;
        private CountDownLatch latch;

        public EventInMaking(Callable<RepoEvent<?>> maker)
        {
            this.maker = maker;
            this.latch = new CountDownLatch(1);
        }

        public void make() throws Exception
        {
            try
            {
                event = maker.call();
            }
            finally
            {
                latch.countDown();
            }
        }

        public RepoEvent<?> getEventWhenReady() throws InterruptedException
        {
            latch.await(30, TimeUnit.SECONDS);
            return event;
        }

        @Override
        public String toString()
        {
            return maker.toString();
        }
    }
}
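A minimal sketch of how a queue like this is driven (illustrative only, not part of the commit: the executors, the producer variable and the buildRepoEvent helper are assumptions, and in the repository these collaborators are injected by Spring; imports omitted):

EventGeneratorQueue queue = new EventGeneratorQueue();
queue.setEnqueueThreadPoolExecutor(Executors.newFixedThreadPool(4));      // builds events in parallel
queue.setDequeueThreadPoolExecutor(Executors.newSingleThreadExecutor()); // also starts the ordered sender
queue.setEvent2MessageProducer(producer);            // producer: an Event2MessageProducer assumed to exist
queue.afterPropertiesSet();

// Event creation may finish out of order on the enqueue pool, but sending follows submission order.
queue.accept(() -> buildRepoEvent("created"));       // buildRepoEvent: hypothetical helper returning RepoEvent<?>
queue.accept(() -> buildRepoEvent("updated"));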
@@ -1,28 +1,28 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2022 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.exporter;

import java.io.IOException;
@@ -39,6 +39,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.Arrays;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.node.MLPropertyInterceptor;
@@ -77,6 +78,7 @@ import org.alfresco.service.descriptor.DescriptorService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.apache.commons.lang3.math.NumberUtils;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;
import org.springframework.extensions.surf.util.ParameterCheck;
@@ -99,6 +101,8 @@ public class ExporterComponent
    private DescriptorService descriptorService;
    private AuthenticationService authenticationService;
    private PermissionService permissionService;

    private String exportChunkSize;

    /** Indent Size */
@@ -178,6 +182,14 @@ public class ExporterComponent
    {
        this.exportSecondaryNodes = exportSecondaryNodes;
    }

    /**
     * @param exportChunkSize the exportChunkSize
     */
    public void setExportChunkSize(String exportChunkSize)
    {
        this.exportChunkSize = exportChunkSize;
    }

    /* (non-Javadoc)
     * @see org.alfresco.service.cmr.view.ExporterService#exportView(java.io.OutputStream, org.alfresco.service.cmr.view.ExporterCrawlerParameters, org.alfresco.service.cmr.view.Exporter)
@@ -943,28 +955,23 @@ public class ExporterComponent
            try
            {
                // Current strategy is to determine if node is a child of the root exported node
                for (NodeRef exportRoot : context.getExportList())
                if (context.getExportMap() != null)
                {
                    if (nodeRef.equals(exportRoot) && parameters.isCrawlSelf() == true)
                    for (NodeRef[] listNodeRef : context.getExportMap().values())
                    {
                        // node to export is the root export node (and root is to be exported)
                        isWithin = true;
                    }
                    else
                    {
                        // locate export root in primary parent path of node
                        Path nodePath = nodeService.getPath(nodeRef);
                        for (int i = nodePath.size() - 1; i >= 0; i--)
                        for (NodeRef exportRoot : listNodeRef)
                        {
                            Path.ChildAssocElement pathElement = (Path.ChildAssocElement) nodePath.get(i);
                            if (pathElement.getRef().getChildRef().equals(exportRoot))
                            {
                                isWithin = true;
                                break;
                            }
                            isWithin = checkIsWithin(nodeRef, exportRoot, parameters);
                        }
                    }
                }
                else
                {
                    for (NodeRef exportRoot : context.getExportList())
                    {
                        isWithin = checkIsWithin(nodeRef, exportRoot, parameters);
                    }
                }
            }
            catch (AccessDeniedException accessErr)
            {
@@ -979,6 +986,28 @@ public class ExporterComponent
            }
        }

        private boolean checkIsWithin(NodeRef nodeRef, NodeRef exportRoot, ExporterCrawlerParameters parameters){
            if (nodeRef.equals(exportRoot) && parameters.isCrawlSelf() == true)
            {
                // node to export is the root export node (and root is to be exported)
                return true;
            }
            else
            {
                // locate export root in primary parent path of node
                Path nodePath = nodeService.getPath(nodeRef);
                for (int i = nodePath.size() - 1; i >= 0; i--)
                {
                    Path.ChildAssocElement pathElement = (Path.ChildAssocElement) nodePath.get(i);
                    if (pathElement.getRef().getChildRef().equals(exportRoot))
                    {
                        return true;
                    }
                }
            }
            return false;
        }

        /**
         * Exporter Context
@@ -986,7 +1015,9 @@ public class ExporterComponent
        private class ExporterContextImpl implements ExporterContext
        {
            private NodeRef[] exportList;
            private Map<Integer,NodeRef[]> exportListMap;
            private NodeRef[] parentList;
            private Map<Integer,NodeRef[]> parentListMap;
            private String exportedBy;
            private Date exportedDate;
            private String exporterVersion;
@@ -995,8 +1026,10 @@ public class ExporterComponent
            private Map<Integer, Set<NodeRef>> nodesWithAssociations = new HashMap<Integer, Set<NodeRef>>();

            private int index;

            private int indexSubList;
            private int chunkSize;

            /**
             * Construct
             *
@@ -1005,7 +1038,17 @@ public class ExporterComponent
            public ExporterContextImpl(ExporterCrawlerParameters parameters)
            {
                index = 0;
                indexSubList = 0;

                if(!NumberUtils.isParsable(exportChunkSize)){
                    chunkSize = 10;
                }
                else
                {
                    chunkSize = Integer.parseInt(exportChunkSize);
                }

                // get current user performing export
                String currentUserName = authenticationService.getCurrentUserName();
                exportedBy = (currentUserName == null) ? "unknown" : currentUserName;
@@ -1022,24 +1065,80 @@ public class ExporterComponent
                    NodeRef exportOf = getNodeRef(parameters.getExportFrom());
                    exportList[0] = exportOf;
                }
                parentList = new NodeRef[exportList.length];
                for (int i = 0; i < exportList.length; i++)
                if(exportList.length > chunkSize)
                {
                    parentList[i] = getParent(exportList[i], parameters.isCrawlSelf());
                    exportListMap = splitArray(exportList);

                    parentListMap = new HashMap<>();
                    for(Map.Entry<Integer, NodeRef[]> exportEntrySet : exportListMap.entrySet())
                    {
                        parentList= new NodeRef[exportEntrySet.getValue().length];
                        for (int i = 0; i < exportEntrySet.getValue().length; i++)
                        {
                            parentList[i] = getParent(exportEntrySet.getValue()[i], parameters.isCrawlSelf());
                        }
                        parentListMap.put(exportEntrySet.getKey(), parentList);
                    }
                }

                else{
                    parentList = new NodeRef[exportList.length];
                    for (int i = 0; i < exportList.length; i++)
                    {
                        parentList[i] = getParent(exportList[i], parameters.isCrawlSelf());
                    }
                }

                // get exporter version
                exporterVersion = descriptorService.getServerDescriptor().getVersion();
            }

            public Map<Integer, NodeRef[]> splitArray(NodeRef[] arrayToSplit){
                if(chunkSize <= 0){
                    return null;
                }
                int rest = arrayToSplit.length % chunkSize;
                int chunks = arrayToSplit.length / chunkSize + (rest > 0 ? 1 : 0);
                Map<Integer, NodeRef[]> arrays = new HashMap<>() ;
                for(Integer i = 0; i < (rest > 0 ? chunks - 1 : chunks); i++){
                    arrays.put(i, Arrays.copyOfRange(arrayToSplit, i * chunkSize, i * chunkSize + chunkSize));
                }
                if(rest > 0){
                    arrays.put(chunks - 1, Arrays.copyOfRange(arrayToSplit, (chunks - 1) * chunkSize, (chunks - 1) * chunkSize + rest));
                }
                return arrays;
            }

            public boolean canRetrieve()
            {
                return index < exportList.length;
                if(exportListMap != null)
                {
                    if (exportListMap.containsKey(indexSubList))
                    {
                        return index < exportListMap.get(indexSubList).length;
                    }
                    else
                    {
                        return false;
                    }
                }
                else {
                    return index < exportList.length;
                }
            }

            public int setNextValue()
            {
                return ++index;
                if(exportListMap != null && (index == exportListMap.get(indexSubList).length-1)){
                    resetContext();
                    if(indexSubList <= exportListMap.size())
                    {
                        ++indexSubList;
                    }
                }
                else{
                    ++index;
                }
                return index;
            }

            public void resetContext()
@@ -1078,7 +1177,13 @@ public class ExporterComponent
            {
                if (canRetrieve())
                {
                    return exportList[index];
                    if(exportListMap!=null)
                    {
                        return exportListMap.get(indexSubList)[index];
                    }
                    else {
                        return exportList[index];
                    }
                }
                return null;
            }
@@ -1091,7 +1196,13 @@ public class ExporterComponent
            {
                if (canRetrieve())
                {
                    return parentList[index];
                    if(parentListMap!=null)
                    {
                        return parentListMap.get(indexSubList)[index];
                    }
                    else {
                        return parentList[index];
                    }
                }
                return null;
            }
@@ -1105,6 +1216,11 @@ public class ExporterComponent
                return exportList;
            }

            public Map<Integer, NodeRef[]> getExportMap()
            {
                return exportListMap;
            }

            /*
             * (non-Javadoc)
             * @see org.alfresco.service.cmr.view.ExporterContext#getExportParentList()
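As a worked illustration of the chunking arithmetic in splitArray above (the values are examples, not from the commit): with the default chunk size of 10 and 25 export roots, the map holds three chunks.

int chunkSize = 10;                                    // default when exportChunkSize is not parsable
int length = 25;                                       // number of export roots (example value)
int rest = length % chunkSize;                         // 5
int chunks = length / chunkSize + (rest > 0 ? 1 : 0);  // 3
// -> key 0: roots[0..9], key 1: roots[10..19], key 2: roots[20..24]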
@@ -0,0 +1,201 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2022 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.jscript;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mozilla.javascript.Callable;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.ContextFactory;
import org.mozilla.javascript.Scriptable;

/**
 * Custom factory that allows to apply configured limits during script executions
 *
 * @see ContextFactory
 */
public class AlfrescoContextFactory extends ContextFactory
{
    private static final Log LOGGER = LogFactory.getLog(AlfrescoContextFactory.class);

    private int optimizationLevel = -1;
    private int maxScriptExecutionSeconds = -1;
    private int maxStackDepth = -1;
    private long maxMemoryUsedInBytes = -1L;
    private int observeInstructionCount = -1;

    private AlfrescoScriptThreadMxBeanWrapper threadMxBeanWrapper;

    private final int INTERPRETIVE_MODE = -1;

    @Override
    protected Context makeContext()
    {
        AlfrescoScriptContext context = new AlfrescoScriptContext();

        context.setOptimizationLevel(optimizationLevel);

        // Needed for both time and memory measurement
        if (maxScriptExecutionSeconds > 0 || maxMemoryUsedInBytes > 0L)
        {
            if (observeInstructionCount > 0)
            {
                LOGGER.info("Enabling observer count...");
                context.setGenerateObserverCount(true);
                context.setInstructionObserverThreshold(observeInstructionCount);
            }
            else
            {
                LOGGER.info("Disabling observer count...");
                context.setGenerateObserverCount(false);
            }
        }

        // Memory limit
        if (maxMemoryUsedInBytes > 0)
        {
            context.setThreadId(Thread.currentThread().getId());
        }

        // Max stack depth
        if (maxStackDepth > 0)
        {
            if (optimizationLevel != INTERPRETIVE_MODE)
            {
                LOGGER.warn("Changing optimization level from " + optimizationLevel + " to " + INTERPRETIVE_MODE);
            }
            // stack depth can only be set when no optimizations are applied
            context.setOptimizationLevel(INTERPRETIVE_MODE);
            context.setMaximumInterpreterStackDepth(maxStackDepth);
        }

        return context;
    }

    @Override
    protected void observeInstructionCount(Context cx, int instructionCount)
    {
        AlfrescoScriptContext acx = (AlfrescoScriptContext) cx;

        if (acx.isLimitsEnabled())
        {
            // Time limit
            if (maxScriptExecutionSeconds > 0)
            {
                long currentTime = System.currentTimeMillis();
                if (currentTime - acx.getStartTime() > maxScriptExecutionSeconds * 1000)
                {
                    throw new Error("Maximum script time of " + maxScriptExecutionSeconds + " seconds exceeded");
                }
            }

            // Memory
            if (maxMemoryUsedInBytes > 0 && threadMxBeanWrapper != null && threadMxBeanWrapper.isThreadAllocatedMemorySupported())
            {
                if (acx.getStartMemory() <= 0)
                {
                    acx.setStartMemory(threadMxBeanWrapper.getThreadAllocatedBytes(acx.getThreadId()));
                }
                else
                {
                    long currentAllocatedBytes = threadMxBeanWrapper.getThreadAllocatedBytes(acx.getThreadId());
                    if (currentAllocatedBytes - acx.getStartMemory() >= maxMemoryUsedInBytes)
                    {
                        throw new Error("Memory limit of " + maxMemoryUsedInBytes + " bytes reached");
                    }
                }
            }
        }
    }

    @Override
    protected Object doTopCall(Callable callable, Context cx, Scriptable scope, Scriptable thisObj, Object[] args)
    {
        AlfrescoScriptContext acx = (AlfrescoScriptContext) cx;
        acx.setStartTime(System.currentTimeMillis());
        return super.doTopCall(callable, cx, scope, thisObj, args);
    }

    public int getOptimizationLevel()
    {
        return optimizationLevel;
    }

    public void setOptimizationLevel(int optimizationLevel)
    {
        this.optimizationLevel = optimizationLevel;
    }

    public int getMaxScriptExecutionSeconds()
    {
        return maxScriptExecutionSeconds;
    }

    public void setMaxScriptExecutionSeconds(int maxScriptExecutionSeconds)
    {
        this.maxScriptExecutionSeconds = maxScriptExecutionSeconds;
    }

    public int getMaxStackDepth()
    {
        return maxStackDepth;
    }

    public void setMaxStackDepth(int maxStackDepth)
    {
        this.maxStackDepth = maxStackDepth;
    }

    public long getMaxMemoryUsedInBytes()
    {
        return maxMemoryUsedInBytes;
    }

    public void setMaxMemoryUsedInBytes(long maxMemoryUsedInBytes)
    {
        this.maxMemoryUsedInBytes = maxMemoryUsedInBytes;
        if (maxMemoryUsedInBytes > 0)
        {
            this.threadMxBeanWrapper = new AlfrescoScriptThreadMxBeanWrapper();
            if (!threadMxBeanWrapper.isThreadAllocatedMemorySupported())
            {
                LOGGER.warn("com.sun.management.ThreadMXBean was not found on the classpath. "
                        + "This means that the limiting the memory usage for a script will NOT work.");
            }
        }
    }

    public int getObserveInstructionCount()
    {
        return observeInstructionCount;
    }

    public void setObserveInstructionCount(int observeInstructionCount)
    {
        this.observeInstructionCount = observeInstructionCount;
    }
}
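A rough stand-alone sketch of how a factory like this enforces its limits (illustrative values and wiring only; the repository configures it through RhinoScriptProcessor rather than directly, org.mozilla.javascript imports omitted, and ContextFactory.initGlobal may only be called once per JVM):

// Stop scripts that run longer than 5 seconds, checking every 100 instructions.
AlfrescoContextFactory factory = new AlfrescoContextFactory();
factory.setMaxScriptExecutionSeconds(5);
factory.setObserveInstructionCount(100);
ContextFactory.initGlobal(factory);                  // must happen before any Context is entered

Context cx = Context.enter();
try
{
    ((AlfrescoScriptContext) cx).setLimitsEnabled(true);
    Scriptable scope = cx.initStandardObjects();
    // The endless loop below is aborted with an Error thrown from observeInstructionCount.
    cx.evaluateString(scope, "while(true){}", "endless.js", 1, null);
}
finally
{
    Context.exit();
}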
@@ -0,0 +1,81 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2022 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.jscript;

import org.mozilla.javascript.Context;

/**
 * Custom Rhino context that holds data as start time and memory
 *
 * @see Context
 */
public class AlfrescoScriptContext extends Context
{
    private long startTime;
    private long threadId;
    private long startMemory;
    private boolean limitsEnabled = false;

    public long getStartTime()
    {
        return startTime;
    }

    public void setStartTime(long startTime)
    {
        this.startTime = startTime;
    }

    public long getThreadId()
    {
        return threadId;
    }

    public void setThreadId(long threadId)
    {
        this.threadId = threadId;
    }

    public long getStartMemory()
    {
        return startMemory;
    }

    public void setStartMemory(long startMemory)
    {
        this.startMemory = startMemory;
    }

    public boolean isLimitsEnabled()
    {
        return limitsEnabled;
    }

    public void setLimitsEnabled(boolean limitsEnabled)
    {
        this.limitsEnabled = limitsEnabled;
    }
}
@@ -0,0 +1,78 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2022 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.jscript;

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

/**
 * Allows to monitor memory usage
 */
public class AlfrescoScriptThreadMxBeanWrapper
{
    private ThreadMXBean threadMXBean = null;
    private boolean threadAllocatedMemorySupported = false;

    private final String THREAD_MX_BEAN_SUN = "com.sun.management.ThreadMXBean";

    public AlfrescoScriptThreadMxBeanWrapper()
    {
        checkThreadAllocatedMemory();
    }

    public long getThreadAllocatedBytes(long threadId)
    {
        if (threadMXBean != null && threadAllocatedMemorySupported)
        {
            return ((com.sun.management.ThreadMXBean) threadMXBean).getThreadAllocatedBytes(threadId);
        }

        return -1;
    }

    public void checkThreadAllocatedMemory()
    {
        try
        {
            Class<?> clazz = Class.forName(THREAD_MX_BEAN_SUN);
            if (clazz != null)
            {
                this.threadAllocatedMemorySupported = true;
                this.threadMXBean = (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean();
            }
        }
        catch (Exception e)
        {
            this.threadAllocatedMemorySupported = false;
        }
    }

    public boolean isThreadAllocatedMemorySupported()
    {
        return threadAllocatedMemorySupported;
    }
}
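For reference, a stand-alone sketch of the JDK facility the wrapper relies on (illustrative and not part of the commit; requires a JVM whose ThreadMXBean is the com.sun.management variant):

import java.lang.management.ManagementFactory;

// Measure bytes allocated by the current thread around a unit of work.
com.sun.management.ThreadMXBean bean =
        (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean();
long tid = Thread.currentThread().getId();
long before = bean.getThreadAllocatedBytes(tid);
byte[] work = new byte[1_000_000];                   // some allocation to observe
long after = bean.getThreadAllocatedBytes(tid);
System.out.println("Allocated roughly " + (after - before) + " bytes");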
@@ -57,10 +57,12 @@ import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.ContextFactory;
import org.mozilla.javascript.ImporterTopLevel;
import org.mozilla.javascript.Script;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Undefined;
import org.mozilla.javascript.WrapFactory;
import org.mozilla.javascript.WrappedException;
import org.springframework.beans.factory.InitializingBean;
@@ -108,6 +110,23 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
    /** Cache of runtime compiled script instances */
    private final Map<String, Script> scriptCache = new ConcurrentHashMap<String, Script>(256);

    /** Rhino optimization level */
    private int optimizationLevel = -1;

    /** Maximum seconds a script is allowed to run */
    private int maxScriptExecutionSeconds = -1;

    /** Maximum of call stack depth (in terms of number of call frames) */
    private int maxStackDepth = -1;

    /** Maximum memory (bytes) a script can use */
    private long maxMemoryUsedInBytes = -1L;

    /** Number of (bytecode) instructions that will trigger the observer */
    private int observerInstructionCount = 100;

    /** Custom context factory */
    public static AlfrescoContextFactory contextFactory;

    /**
     * Set the default store reference
@@ -144,6 +163,51 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
        this.shareSealedScopes = shareSealedScopes;
    }

    /**
     * @param optimizationLevel
     *            -1 interpretive mode, 0 no optimizations, 1-9 optimizations performed
     */
    public void setOptimizationLevel(int optimizationLevel)
    {
        this.optimizationLevel = optimizationLevel;
    }

    /**
     * @param maxScriptExecutionSeconds
     *            the number of seconds a script is allowed to run
     */
    public void setMaxScriptExecutionSeconds(int maxScriptExecutionSeconds)
    {
        this.maxScriptExecutionSeconds = maxScriptExecutionSeconds;
    }

    /**
     * @param maxStackDepth
     *            the number of call stack depth allowed
     */
    public void setMaxStackDepth(int maxStackDepth)
    {
        this.maxStackDepth = maxStackDepth;
    }

    /**
     * @param maxMemoryUsedInBytes
     *            the number of memory a script can use
     */
    public void setMaxMemoryUsedInBytes(long maxMemoryUsedInBytes)
    {
        this.maxMemoryUsedInBytes = maxMemoryUsedInBytes;
    }

    /**
     * @param observerInstructionCount
     *            the number of instructions that will trigger {@link ContextFactory#observeInstructionCount}
     */
    public void setObserverInstructionCount(int observerInstructionCount)
    {
        this.observerInstructionCount = observerInstructionCount;
    }

    /**
     * @see org.alfresco.service.cmr.repository.ScriptProcessor#reset()
     */
@@ -441,6 +505,8 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
    private Object executeScriptImpl(Script script, Map<String, Object> model, boolean secure, String debugScriptName)
        throws AlfrescoRuntimeException
    {
        Scriptable scope = null;

        long startTime = 0;
        if (callLogger.isDebugEnabled())
        {
@@ -457,14 +523,16 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
        // Create a thread-specific scope from one of the shared scopes.
        // See http://www.mozilla.org/rhino/scopes.html
        cx.setWrapFactory(secure ? wrapFactory : sandboxFactory);
        Scriptable scope;

        // Enables or disables execution limits based on secure flag
        enableLimits(cx, secure);

        if (this.shareSealedScopes)
        {
            Scriptable sharedScope = secure ? this.nonSecureScope : this.secureScope;
            scope = cx.newObject(sharedScope);
            scope.setPrototype(sharedScope);
            scope.setParentScope(null);

        }
        else
        {
@@ -538,6 +606,7 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
        }
        finally
        {
            unsetScope(model, scope);
            Context.exit();

            if (callLogger.isDebugEnabled())
@@ -630,6 +699,9 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
     */
    public void afterPropertiesSet() throws Exception
    {
        // Initialize context factory
        initContextFactory();

        // Initialize the secure scope
        Context cx = Context.enter();
        try
@@ -687,4 +759,129 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
        }
        return scope;
    }

    /**
     * Clean supplied scope and unset it from any model instance where it has been injected before
     *
     * @param model
     *            Data model containing objects from where scope will be unset
     * @param scope
     *            The scope to clean
     */
    private void unsetScope(Map<String, Object> model, Scriptable scope)
    {
        if (scope != null)
        {
            Object[] ids = scope.getIds();
            if (ids != null)
            {
                for (Object id : ids)
                {
                    try
                    {
                        deleteProperty(scope, id.toString());
                    }
                    catch (Exception e)
                    {
                        logger.info("Unable to delete id: " + id, e);
                    }
                }
            }
        }

        if (model != null)
        {
            for (String key : model.keySet())
            {
                try
                {
                    deleteProperty(scope, key);

                    Object obj = model.get(key);
                    if (obj instanceof Scopeable)
                    {
                        ((Scopeable) obj).setScope(null);
                    }
                }
                catch (Exception e)
                {
                    logger.info("Unable to unset model object " + key + " : ", e);
                }
            }
        }
    }

    /**
     * Deletes a property from the supplied scope, if property is not removable, then is set to null
     *
     * @param scope
     *            the scope object from where property will be removed
     * @param name
     *            the property name to delete
     */
    private void deleteProperty(Scriptable scope, String name)
    {
        if (scope != null && name != null)
        {
            if (!ScriptableObject.deleteProperty(scope, name))
            {
                ScriptableObject.putProperty(scope, name, null);
            }
            scope.delete(name);
        }
    }

    /**
     * Initializes the context factory with limits configuration
     */
    private synchronized void initContextFactory()
    {
        if (contextFactory == null)
        {
            contextFactory = new AlfrescoContextFactory();
            contextFactory.setOptimizationLevel(optimizationLevel);

            if (maxScriptExecutionSeconds > 0)
            {
                contextFactory.setMaxScriptExecutionSeconds(maxScriptExecutionSeconds);
            }

            if (maxMemoryUsedInBytes > 0L)
            {
                contextFactory.setMaxMemoryUsedInBytes(maxMemoryUsedInBytes);
            }

            if (maxStackDepth > 0)
            {
                contextFactory.setMaxStackDepth(maxStackDepth);
            }

            if (maxScriptExecutionSeconds > 0 || maxMemoryUsedInBytes > 0L)
            {
                contextFactory.setObserveInstructionCount(observerInstructionCount);
            }

            ContextFactory.initGlobal(contextFactory);
        }
    }

    /**
     * If script is considered secure no limits will be applied, otherwise, the limits are enabled and the script can be
     * interrupted in case a limit has been reached.
     *
     * @param cx
     *            the Rhino scope
     * @param secure
     *            true if script execution is considered secure (e.g, deployed at classpath level)
     */
    private void enableLimits(Context cx, boolean secure)
    {
        if (cx != null)
        {
            if (cx instanceof AlfrescoScriptContext)
            {
                ((AlfrescoScriptContext) cx).setLimitsEnabled(!secure);
            }
        }
    }
}
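The limits above are plain bean properties, so a minimal programmatic sketch of configuring them could look like the following (the values are illustrative and the processor's other mandatory properties and Spring wiring are omitted; this is not the repository's actual configuration):

RhinoScriptProcessor processor = new RhinoScriptProcessor();
processor.setMaxScriptExecutionSeconds(30);             // stop scripts that run longer than 30 seconds
processor.setMaxMemoryUsedInBytes(50L * 1024 * 1024);   // roughly 50 MB allocated by the script thread
processor.setMaxStackDepth(1000);                       // forces interpretive mode in the context factory
processor.setObserverInstructionCount(5000);            // check the limits every 5000 instructions
processor.afterPropertiesSet();                         // installs the global AlfrescoContextFactory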
@@ -1,538 +0,0 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */

package org.alfresco.repo.rendition.executer;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ParameterDefinitionImpl;
import org.alfresco.repo.rendition.RenditionLocation;
import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.rendition.RenditionServiceException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.Parser;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ContentHandlerDecorator;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;

/**
 * This class provides a way to turn documents supported by the
 * {@link ContentService} standard transformers into basic, clean
 * HTML.
 * <P/>
 * The HTML that is produced probably isn't going to be suitable
 * for direct web publishing, as it's likely going to be too
 * basic. Instead, it should be simple and clean HTML, suitable
 * for being the basis of some web-friendly HTML once edited
 * / further transformed.
 *
 * @author Nick Burch
 * @since 3.4
 *
 * @deprecated The RenditionService is being replace by the simpler async RenditionService2.
 */
@Deprecated
public class HTMLRenderingEngine extends AbstractRenderingEngine
{
    private static Log logger = LogFactory.getLog(HTMLRenderingEngine.class);
    private TikaConfig tikaConfig;

    /**
     * This optional parameter, when set to true, causes only the
     * contents of the HTML body to be written out as the rendition.
     * By default, the whole of the HTML document is used.
     */
    public static final String PARAM_BODY_CONTENTS_ONLY = "bodyContentsOnly";
    /**
     * This optional parameter, when set to true, causes any embedded
     * images to be written into the same folder as the html, with
     * a name prefix.
     * By default, images are placed into a sub-folder.
     */
    public static final String PARAM_IMAGES_SAME_FOLDER = "imagesSameFolder";

    /*
     * Action constants
     */
    public static final String NAME = "htmlRenderingEngine";

    @Override
    protected Collection<ParameterDefinition> getParameterDefinitions() {
        Collection<ParameterDefinition> paramList = super.getParameterDefinitions();
        paramList.add(new ParameterDefinitionImpl(PARAM_BODY_CONTENTS_ONLY, DataTypeDefinition.BOOLEAN, false,
                getParamDisplayLabel(PARAM_BODY_CONTENTS_ONLY)));
        paramList.add(new ParameterDefinitionImpl(PARAM_IMAGES_SAME_FOLDER, DataTypeDefinition.BOOLEAN, false,
                getParamDisplayLabel(PARAM_IMAGES_SAME_FOLDER)));
        return paramList;
    }

    /**
     * Injects the TikaConfig to use
     *
     * @param tikaConfig The Tika Config to use
     */
    public void setTikaConfig(TikaConfig tikaConfig)
    {
        this.tikaConfig = tikaConfig;
    }

    /*
     * (non-Javadoc)
     * @see org.alfresco.repo.rendition.executer.AbstractRenderingEngine#render(org.alfresco.repo.rendition.executer.AbstractRenderingEngine.RenderingContext)
     */
    @Override
    protected void render(RenderingContext context)
    {
        ContentReader contentReader = context.makeContentReader();
        String sourceMimeType = contentReader.getMimetype();

        // Check that Tika supports the supplied file
        AutoDetectParser p = new AutoDetectParser(tikaConfig);
        MediaType sourceMediaType = MediaType.parse(sourceMimeType);
        if(! p.getParsers().containsKey(sourceMediaType))
        {
            throw new RenditionServiceException(
                    "Source mime type of " + sourceMimeType +
                    " is not supported by Tika for HTML conversions"
            );
        }

        // Make the HTML Version using Tika
        // This will also extract out any images as found
        generateHTML(p, context);
    }

    private String getHtmlBaseName(RenderingContext context)
    {
        // Based on the name of the source node, which will
        // also largely be the name of the html node
        String baseName = nodeService.getProperty(
                context.getSourceNode(),
                ContentModel.PROP_NAME
        ).toString();
        if(baseName.lastIndexOf('.') > -1)
        {
            baseName = baseName.substring(0, baseName.lastIndexOf('.'));
        }
        return baseName;
    }
    /**
     * What name should be used for the images directory?
     * Note this is only required if {@link #PARAM_IMAGES_SAME_FOLDER} is false (the default).
     */
    private String getImagesDirectoryName(RenderingContext context)
    {
        // Based on the name of the source node, which will
        // also largely be the name of the html node
        String folderName = getHtmlBaseName(context);
        folderName = folderName + "_files";
        return folderName;
    }
    /**
     * What prefix should be applied to the name of images?
     */
    private String getImagesPrefixName(RenderingContext context)
    {
        if( context.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false) )
        {
            // Prefix with the name of the source node
            return getHtmlBaseName(context) + "_";
        }
        else {
            // They have their own folder, so no prefix is needed
            return "";
        }
    }

    /**
     * Creates a directory to store the images in.
     * The directory will be a sibling of the rendered
     * HTML, and named similar to it.
     * Note this is only required if {@link #PARAM_IMAGES_SAME_FOLDER} is false (the default).
     */
    private NodeRef createImagesDirectory(RenderingContext context)
    {
        // It should be a sibling of the HTML in it's eventual location
        // (not it's current temporary one!)
        RenditionLocation location = resolveRenditionLocation(
                context.getSourceNode(), context.getDefinition(), context.getDestinationNode()
        );
        NodeRef parent = location.getParentRef();

        // Figure out what to call it, based on the HTML node
        String folderName = getImagesDirectoryName(context);

        // It is already there?
        // (eg from when the rendition is being re-run)
        NodeRef imgFolder = nodeService.getChildByName(
                parent, ContentModel.ASSOC_CONTAINS, folderName
        );
        if(imgFolder != null)
            return imgFolder;

        // Create the directory
        Map<QName,Serializable> properties = new HashMap<QName,Serializable>();
        properties.put(ContentModel.PROP_NAME, folderName);
        imgFolder = nodeService.createNode(
                parent,
                ContentModel.ASSOC_CONTAINS,
                QName.createQName(folderName),
                ContentModel.TYPE_FOLDER,
                properties
        ).getChildRef();

        return imgFolder;
    }

    private NodeRef createEmbeddedImage(NodeRef imgFolder, boolean primary,
            String filename, String contentType, InputStream imageSource,
            RenderingContext context)
    {
        // Create the node if needed
        NodeRef img = nodeService.getChildByName(
                imgFolder, ContentModel.ASSOC_CONTAINS, filename
        );
        if(img == null)
        {
            Map<QName,Serializable> properties = new HashMap<QName,Serializable>();
            properties.put(ContentModel.PROP_NAME, filename);
            img = nodeService.createNode(
                    imgFolder,
                    ContentModel.ASSOC_CONTAINS,
                    QName.createQName(filename),
                    ContentModel.TYPE_CONTENT,
                    properties
            ).getChildRef();
            if (logger.isDebugEnabled())
            {
                logger.debug("Image node created: " + img);
            }
        }

        // TODO Once composite content is properly supported,
        // at this point we'll associate the new image with
        // the rendered HTML node so the dependency is tracked.

        // Put the image into the node
        ContentWriter writer = contentService.getWriter(
                img, ContentModel.PROP_CONTENT, true
        );
        writer.setMimetype(contentType);
        writer.putContent(imageSource);
        if (logger.isDebugEnabled())
        {
            logger.debug("Image content written into " + img);
        }

        // All done
        return img;
    }

    /**
     * Builds a Tika-compatible SAX content handler, which will
     * be used to generate+capture the XHTML
     */
    private ContentHandler buildContentHandler(Writer output, RenderingContext context)
    {
        // Create the main transformer
        SAXTransformerFactory factory = (SAXTransformerFactory)
                SAXTransformerFactory.newInstance();
        TransformerHandler handler;

        try {
            handler = factory.newTransformerHandler();
        } catch (TransformerConfigurationException e) {
            throw new RenditionServiceException("SAX Processing isn't available - " + e);
        }

        handler.getTransformer().setOutputProperty(OutputKeys.INDENT, "yes");
        handler.setResult(new StreamResult(output));
        handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml");

        // Change the image links as they go past
        String dirName = null, imgPrefix = null;
        if(context.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false))
        {
            imgPrefix = getImagesPrefixName(context);
        }
        else
        {
            dirName = getImagesDirectoryName(context);
        }
        ContentHandler contentHandler = new TikaImageRewritingContentHandler(
                handler, dirName, imgPrefix
        );

        // If required, wrap it to only return the body
        boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
        if(bodyOnly) {
            contentHandler = new BodyContentHandler(contentHandler);
        }

        // All done
        return contentHandler;
    }

    /**
     * Asks Tika to translate the contents into HTML
     */
    private void generateHTML(Parser p, RenderingContext context)
    {
        ContentReader contentReader = context.makeContentReader();

        // Setup things to parse with
        StringWriter sw = new StringWriter();
        ContentHandler handler = buildContentHandler(sw, context);

        // Tell Tika what we're dealing with
        Metadata metadata = new Metadata();
        metadata.set(
                Metadata.CONTENT_TYPE,
                contentReader.getMimetype()
        );
        metadata.set(
                Metadata.RESOURCE_NAME_KEY,
                nodeService.getProperty(
                        context.getSourceNode(),
                        ContentModel.PROP_NAME
                ).toString()
        );

        // Our parse context needs to extract images
        ParseContext parseContext = new ParseContext();
        parseContext.set(Parser.class, new TikaImageExtractingParser(context));

        // Parse
        try {
            p.parse(
                    contentReader.getContentInputStream(),
                    handler, metadata, parseContext
            );
        } catch(Exception e) {
            throw new RenditionServiceException("Tika HTML Conversion Failed", e);
        }

        // As a string
        String html = sw.toString();

        // If we're doing body-only, remove all the html namespaces
        // that will otherwise clutter up the document
        boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
        if(bodyOnly) {
            html = html.replaceAll("<\\?xml.*?\\?>", "");
            html = html.replaceAll("<p xmlns=\"http://www.w3.org/1999/xhtml\"","<p");
            html = html.replaceAll("<h(\\d) xmlns=\"http://www.w3.org/1999/xhtml\"","<h\\1");
            html = html.replaceAll("<div xmlns=\"http://www.w3.org/1999/xhtml\"","<div");
            html = html.replaceAll("<table xmlns=\"http://www.w3.org/1999/xhtml\"","<table");
            html = html.replaceAll(" ","");
        }

        // Save it
        ContentWriter contentWriter = context.makeContentWriter();
        contentWriter.setMimetype("text/html");
        contentWriter.putContent( html );
    }


    /**
     * A nested Tika parser which extracts out any
     * images as they come past.
     */
    @SuppressWarnings("serial")
    private class TikaImageExtractingParser implements Parser {
        private Set<MediaType> types;

        private RenderingContext renderingContext;
        private NodeRef imgFolder = null;
        private int count = 0;

        private TikaImageExtractingParser(RenderingContext renderingContext) {
            this.renderingContext = renderingContext;

            // Our expected types
            types = new HashSet<MediaType>();
            types.add(MediaType.image("bmp"));
            types.add(MediaType.image("gif"));
            types.add(MediaType.image("jpg"));
            types.add(MediaType.image("jpeg"));
            types.add(MediaType.image("png"));
            types.add(MediaType.image("tiff"));

            // Are images going in the same place as the HTML?
            if( renderingContext.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false) )
            {
                RenditionLocation location = resolveRenditionLocation(
                        renderingContext.getSourceNode(), renderingContext.getDefinition(),
                        renderingContext.getDestinationNode()
                );
                imgFolder = location.getParentRef();
                if (logger.isDebugEnabled())
                {
                    logger.debug("Using imgFolder: " + imgFolder);
                }
            }
        }

        @Override
        public Set<MediaType> getSupportedTypes(ParseContext context) {
            return types;
        }

        @Override
        public void parse(InputStream stream, ContentHandler handler,
                Metadata metadata, ParseContext context) throws IOException,
                SAXException, TikaException {
            // Is it a supported image?
            String filename = metadata.get(Metadata.RESOURCE_NAME_KEY);
            String type = metadata.get(Metadata.CONTENT_TYPE);
            boolean accept = false;

            if(type != null) {
                for(MediaType mt : types) {
                    if(mt.toString().equals(type)) {
                        accept = true;
                    }
                }
            }
            if(filename != null) {
                for(MediaType mt : types) {
                    String ext = "." + mt.getSubtype();
                    if(filename.endsWith(ext)) {
                        accept = true;
                    }
                }
            }

            if(!accept)
                return;

            handleImage(stream, filename, type);
        }

        private void handleImage(InputStream stream, String filename, String type) {
            count++;

            // Do we already have the folder? If not, create it
            if(imgFolder == null) {
                imgFolder = createImagesDirectory(renderingContext);
            }

            // Give it a sensible name if needed
            if(filename == null) {
                filename = "image-" + count + ".";
                filename += type.substring(type.indexOf('/')+1);
            }

            // Prefix the filename if needed
            filename = getImagesPrefixName(renderingContext) + filename;

            // Save the image
            createEmbeddedImage(imgFolder, (count==1), filename, type, stream, renderingContext);
        }
    }

    /**
     * A content handler that re-writes image src attributes,
     * and passes everything else on to the real one.
     */
    private class TikaImageRewritingContentHandler extends ContentHandlerDecorator {
        private String imageFolder;
        private String imagePrefix;

        private TikaImageRewritingContentHandler(ContentHandler handler, String imageFolder, String imagePrefix) {
            super(handler);
            this.imageFolder = imageFolder;
            this.imagePrefix = imagePrefix;
        }

        @Override
        public void startElement(String uri, String localName, String qName,
                Attributes origAttrs) throws SAXException {
            // If we have an image tag, re-write the src attribute
            // if required
            if("img".equals(localName)) {
                AttributesImpl attrs;
                if(origAttrs instanceof AttributesImpl) {
                    attrs = (AttributesImpl)origAttrs;
                } else {
                    attrs = new AttributesImpl(origAttrs);
                }

                for(int i=0; i<attrs.getLength(); i++) {
                    if("src".equals(attrs.getLocalName(i))) {
                        String src = attrs.getValue(i);
                        if(src.startsWith("embedded:")) {
                            String newSrc = "";
                            if(imageFolder != null)
                                newSrc += imageFolder + "/";
                            if(imagePrefix != null)
                                newSrc += imagePrefix;
                            newSrc += src.substring(src.indexOf(':')+1);
                            attrs.setValue(i, newSrc);
                        }
                    }
                }
                super.startElement(uri, localName, qName, attrs);
            } else {
                // For any other tag, pass through as-is
                super.startElement(uri, localName, qName, origAttrs);
            }
        }
    }
}
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -52,6 +52,7 @@ import java.util.Set;
import java.util.StringJoiner;

import static org.alfresco.repo.content.MimetypeMap.MIMETYPE_PDF;
import static org.alfresco.repo.content.transform.magick.ImageTransformationOptions.OPT_COMMAND_OPTIONS;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALLOW_ENLARGEMENT;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALLOW_PDF_ENLARGEMENT;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALPHA_REMOVE;
@@ -122,6 +123,7 @@ public class TransformationOptionsConverter implements InitializingBean
        IMAGE_OPTIONS.addAll(RESIZE_OPTIONS);
        IMAGE_OPTIONS.add(AUTO_ORIENT);
        IMAGE_OPTIONS.add(ALPHA_REMOVE);
        IMAGE_OPTIONS.add(OPT_COMMAND_OPTIONS);
    }

    private static Set<String> PDF_OPTIONS = new HashSet<>(Arrays.asList(new String[]
@@ -284,6 +286,8 @@ public class TransformationOptionsConverter implements InitializingBean
                }
                opts.setSourceOptionsList(sourceOptionsList);
            }

            ifSet(options, OPT_COMMAND_OPTIONS, (v) -> opts.setCommandOptions(v));
        }
    }
    else
@@ -361,13 +365,11 @@ public class TransformationOptionsConverter implements InitializingBean
    {
        ImageTransformationOptions opts = (ImageTransformationOptions) options;

        // TODO We don't support this any more for security reasons, however it might be possible to
        // extract some of the well know values and add them to the newer ImageMagick transform options.
        // From a security viewpoint it would be better not to support the option of passing anything to
        // ImageMagick. It might be possible to extract some of the well know values and add them to the
        // T-Engine engine_config.
        String commandOptions = opts.getCommandOptions();
        if (commandOptions != null && !commandOptions.isBlank())
        {
            logger.error("ImageMagick commandOptions are no longer supported for security reasons: " + commandOptions);
        }
        ifSet(commandOptions != null && !commandOptions.isBlank(), map, OPT_COMMAND_OPTIONS, commandOptions);

        ImageResizeOptions imageResizeOptions = opts.getResizeOptions();
        if (imageResizeOptions != null)
@@ -26,6 +26,9 @@
package org.alfresco.repo.search;

import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.http.HttpStatus;

import java.util.List;

/**
* @author Andy
@@ -33,11 +36,10 @@ import org.alfresco.error.AlfrescoRuntimeException;
*/
public class QueryParserException extends AlfrescoRuntimeException
{

/**
*
*/
/** Serial version UUID. */
private static final long serialVersionUID = 4886993838297301968L;
/** Http Status Code that should be returned by Remote API. */
private int httpStatusCode;

/**
* @param msgId
@@ -45,7 +47,6 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId)
{
super(msgId);
// TODO Auto-generated constructor stub
}

/**
@@ -55,7 +56,6 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Object[] msgParams)
{
super(msgId, msgParams);
// TODO Auto-generated constructor stub
}

/**
@@ -65,7 +65,6 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Throwable cause)
{
super(msgId, cause);
// TODO Auto-generated constructor stub
}

/**
@@ -76,7 +75,22 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Object[] msgParams, Throwable cause)
{
super(msgId, msgParams, cause);
// TODO Auto-generated constructor stub
}

/**
* Constructor for exception that allows setting an HTTP status code.
*
* @param msgId Message for the exception
* @param httpStatusCode Status code to return for exception
*/
public QueryParserException(String msgId, int httpStatusCode)
{
super(msgId);
this.httpStatusCode = httpStatusCode;
}

public int getHttpStatusCode()
{
return httpStatusCode;
}
}
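The new status-aware constructor lets the remote API report the underlying search failure with a specific HTTP status instead of a generic 500. A minimal usage sketch (the handler class below is illustrative and not part of this changeset; only the constructor and getter come from the diff):

```java
import org.alfresco.repo.search.QueryParserException;
import org.apache.http.HttpStatus;

// Hypothetical caller that maps an unsupported SOLR operation onto a 501 response.
public class SearchErrorHandlerSketch
{
    public int toStatusCode(String solrErrorMessage)
    {
        QueryParserException failure =
                new QueryParserException(solrErrorMessage, HttpStatus.SC_NOT_IMPLEMENTED);
        // Remote API layers can read the code back instead of assuming 500.
        return failure.getHttpStatusCode();
    }
}
```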
@@ -1,28 +1,28 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.search.impl.querymodel.impl.db;

import java.util.ArrayList;
@@ -74,6 +74,10 @@ public class DBQuery extends BaseQuery implements DBQueryBuilderComponent

Set<String> selectorGroup;

private int limit = 0;

private int offset = 0;

/**
* @param source Source
* @param constraint Constraint
@@ -133,6 +137,22 @@ public class DBQuery extends BaseQuery implements DBQueryBuilderComponent
this.sinceTxId = sinceTxId;
}

public int getLimit() {
return limit;
}

public void setLimit(int limit) {
this.limit = limit;
}

public int getOffset() {
return offset;
}

public void setOffset(int offset) {
this.offset = offset;
}

public List<DBQueryBuilderJoinCommand> getJoins()
{
HashMap<QName, DBQueryBuilderJoinCommand> singleJoins = new HashMap<QName, DBQueryBuilderJoinCommand>();
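The new limit and offset fields carry paging hints from the query layer down to the generated SQL (see the `limit #{offset}, #{limit}` mapper fragment later in this changeset; a limit of 0 means no paging clause is emitted). A small sketch of the assumed call pattern; the helper and its caller are illustrative, only the getters and setters come from the diff:

```java
// Hypothetical helper: apply page-based paging to a DBQuery built by the query engine.
static void applyPaging(DBQuery dbQuery, int page, int pageSize)
{
    dbQuery.setOffset(page * pageSize);   // rows to skip
    dbQuery.setLimit(pageSize);           // rows to return; 0 (the default) disables paging
}
```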
[File diff suppressed because it is too large]
@@ -41,9 +41,13 @@ import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.EqualsHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class NodePermissionAssessor
{
protected static final Log logger = LogFactory.getLog(NodePermissionAssessor.class);

private final boolean isSystemReading;
private final boolean isAdminReading;
private final boolean isNullReading;
@@ -138,24 +142,31 @@

public void setMaxPermissionChecks(int maxPermissionChecks)
{
this.maxPermissionChecks = maxPermissionChecks;
if (maxPermissionChecks == Integer.MAX_VALUE)
{
this.maxPermissionChecks = maxPermissionChecks;
}
else
{
this.maxPermissionChecks = maxPermissionChecks + 1;
}
}

public boolean shouldQuitChecks()
{
boolean result = false;

if (checksPerformed >= maxPermissionChecks)
{
result = true;
logger.warn("Maximum permission checks exceeded (" + maxPermissionChecks + ")");
return true;
}

if ((System.currentTimeMillis() - startTime) >= maxPermissionCheckTimeMillis)
{
result = true;
logger.warn("Maximum permission checks time exceeded (" + maxPermissionCheckTimeMillis + ")");
return true;
}

return result;

return false;
}

public void setMaxPermissionCheckTimeMillis(long maxPermissionCheckTimeMillis)
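After this change shouldQuitChecks() bails out as soon as either the check-count budget or the time budget is spent, and setMaxPermissionChecks(n) permits exactly n checks before the next attempt trips the warning. A rough sketch of the assumed calling loop (the predicate and list handling are illustrative; only shouldQuitChecks() is taken from the diff):

```java
// Hypothetical read-permission filtering loop built around NodePermissionAssessor.
static List<NodeRef> filterReadable(NodePermissionAssessor assessor,
                                    List<NodeRef> candidates,
                                    Predicate<NodeRef> canRead)
{
    List<NodeRef> readable = new ArrayList<>();
    for (NodeRef node : candidates)
    {
        if (assessor.shouldQuitChecks())
        {
            break;                      // count or time budget exhausted: stop early
        }
        if (canRead.test(node))         // illustrative check, not an assessor method
        {
            readable.add(node);
        }
    }
    return readable;
}
```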
@@ -29,26 +29,29 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.util.List;

import javax.servlet.http.HttpServletResponse;

import org.alfresco.repo.search.QueryParserException;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;

public abstract class AbstractSolrQueryHTTPClient
{
/** Logger for the class. */
private static final Log LOGGER = LogFactory.getLog(AbstractSolrQueryHTTPClient.class);

public static final int DEFAULT_SAVEPOST_BUFFER = 4096;

// Constants copied from org.apache.solr.common.params.HighlightParams (solr-solrj:1.4.1)
@@ -79,11 +82,13 @@ public abstract class AbstractSolrQueryHTTPClient
public static final String HIGHLIGHT_PARAMS_SLOP = HIGHLIGHT_PARAMS_HIGHLIGHT + "." + HIGHLIGHT_PARAMS_REGEX + ".slop";
public static final String HIGHLIGHT_PARAMS_PATTERN = HIGHLIGHT_PARAMS_HIGHLIGHT + "." + HIGHLIGHT_PARAMS_REGEX + ".pattern";
public static final String HIGHLIGHT_PARAMS_MAX_RE_CHARS = HIGHLIGHT_PARAMS_HIGHLIGHT + "." + HIGHLIGHT_PARAMS_REGEX + ".maxAnalyzedChars";

/** List of SOLR Exceptions that should be returning HTTP 501 status code in Remote API. */
private static final List<String> STATUS_CODE_501_EXCEPTIONS = List.of("java.lang.UnsupportedOperationException");

protected JSONObject postQuery(HttpClient httpClient, String url, JSONObject body) throws UnsupportedEncodingException,
IOException, HttpException, URIException, JSONException
protected JSONObject postQuery(HttpClient httpClient, String url, JSONObject body) throws IOException, JSONException
{
PostMethod post = new PostMethod(url);
PostMethod post = createNewPostMethod(url);
if (body.toString().length() > DEFAULT_SAVEPOST_BUFFER)
{
post.getParams().setBooleanParameter(HttpMethodParams.USE_EXPECT_CONTINUE, true);
@@ -103,9 +108,33 @@ public abstract class AbstractSolrQueryHTTPClient
httpClient.executeMethod(post);
}
}
String responseBodyStr = post.getResponseBodyAsString();
if (post.getStatusCode() != HttpServletResponse.SC_OK)
{
throw new QueryParserException("Request failed " + post.getStatusCode() + " " + url.toString());
String trace = null;
try
{
trace = new JSONObject(responseBodyStr).getJSONObject("error").getString("trace");
}
catch (JSONException jsonException)
{
LOGGER.warn("Node 'error.trace' is not present in Search Services error response: " + responseBodyStr);
LOGGER.warn("A generic error message will be provided. Check SOLR log file in order to find the root cause for this issue");
}

int httpStatusCode = post.getStatusCode();
String message = "Solr request failed with " + httpStatusCode + " " + url;

// Override the status code for certain exceptions with 501.
if (trace != null)
{
String traceException = trace.substring(0, trace.indexOf(":")).trim();
if (STATUS_CODE_501_EXCEPTIONS.contains(traceException))
{
httpStatusCode = org.apache.http.HttpStatus.SC_NOT_IMPLEMENTED;
}
}
throw new QueryParserException(message, httpStatusCode);
}

Reader reader = new BufferedReader(new InputStreamReader(post.getResponseBodyAsStream(), post.getResponseCharSet()));
@@ -118,4 +147,10 @@ public abstract class AbstractSolrQueryHTTPClient
post.releaseConnection();
}
}

/** Helper method that can be overridden by unit tests. */
protected PostMethod createNewPostMethod(String url)
{
return new PostMethod(url);
}
}
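The new createNewPostMethod(String) hook exists so unit tests can substitute a canned PostMethod instead of hitting a live SOLR endpoint. A sketch of how a test might use it; the Mockito stubbing is an assumed test technique, not code from this changeset, and it assumes the class declares no other abstract members:

```java
// Hypothetical JUnit/Mockito test fragment overriding the new hook.
AbstractSolrQueryHTTPClient client = new AbstractSolrQueryHTTPClient()
{
    @Override
    protected PostMethod createNewPostMethod(String url)
    {
        PostMethod stubbedPost = Mockito.mock(PostMethod.class);
        Mockito.when(stubbedPost.getStatusCode()).thenReturn(HttpServletResponse.SC_OK);
        // Further stubbing (response body, charset) would be needed for a full postQuery() run.
        return stubbedPost;
    }
};
```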
@@ -1,140 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.urlshortening;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.service.cmr.urlshortening.UrlShortener;
|
||||
import org.apache.commons.httpclient.HostConfiguration;
|
||||
import org.apache.commons.httpclient.HttpClient;
|
||||
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
|
||||
import org.apache.commons.httpclient.NameValuePair;
|
||||
import org.apache.commons.httpclient.methods.GetMethod;
|
||||
import org.apache.commons.httpclient.protocol.Protocol;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* @deprecated as it is no longer used in the core repository code.
|
||||
*/
|
||||
@Deprecated
|
||||
public class BitlyUrlShortenerImpl implements UrlShortener
|
||||
{
|
||||
private static final Log log = LogFactory.getLog(BitlyUrlShortenerImpl.class);
|
||||
|
||||
private int urlLength = 20;
|
||||
private String username;
|
||||
private String apiKey = "R_ca15c6c89e9b25ccd170bafd209a0d4f";
|
||||
private HttpClient httpClient;
|
||||
|
||||
public BitlyUrlShortenerImpl()
|
||||
{
|
||||
httpClient = new HttpClient();
|
||||
httpClient.setHttpConnectionManager(new MultiThreadedHttpConnectionManager());
|
||||
HostConfiguration hostConfiguration = new HostConfiguration();
|
||||
hostConfiguration.setHost("api-ssl.bitly.com", 443, Protocol.getProtocol("https"));
|
||||
httpClient.setHostConfiguration(hostConfiguration);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String shortenUrl(String longUrl)
|
||||
{
|
||||
if (log.isDebugEnabled())
|
||||
{
|
||||
log.debug("Shortening URL: " + longUrl);
|
||||
}
|
||||
String shortUrl = longUrl;
|
||||
if (longUrl.length() > urlLength)
|
||||
{
|
||||
GetMethod getMethod = new GetMethod();
|
||||
getMethod.setPath("/v3/shorten");
|
||||
|
||||
List<NameValuePair> args = new ArrayList<NameValuePair>();
|
||||
args.add(new NameValuePair("login", username));
|
||||
args.add(new NameValuePair("apiKey", apiKey));
|
||||
args.add(new NameValuePair("longUrl", longUrl));
|
||||
args.add(new NameValuePair("format", "txt"));
|
||||
getMethod.setQueryString(args.toArray(new NameValuePair[args.size()]));
|
||||
|
||||
try
|
||||
{
|
||||
int resultCode = httpClient.executeMethod(getMethod);
|
||||
if (resultCode == 200)
|
||||
{
|
||||
shortUrl = getMethod.getResponseBodyAsString();
|
||||
}
|
||||
else
|
||||
{
|
||||
log.warn("Failed to shorten URL " + longUrl + " - response code == " + resultCode);
|
||||
log.warn(getMethod.getResponseBodyAsString());
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
log.error("Failed to shorten URL " + longUrl, ex);
|
||||
}
|
||||
if (log.isDebugEnabled())
|
||||
{
|
||||
log.debug("URL " + longUrl + " has been shortened to " + shortUrl);
|
||||
}
|
||||
}
|
||||
return shortUrl.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public int getUrlLength()
|
||||
{
|
||||
return urlLength;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param urlLength the urlLength to set
|
||||
*/
|
||||
public void setUrlLength(int urlLength)
|
||||
{
|
||||
this.urlLength = urlLength;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param username the username to set
|
||||
*/
|
||||
public void setUsername(String username)
|
||||
{
|
||||
this.username = username;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param apiKey the apiKey to set
|
||||
*/
|
||||
public void setApiKey(String apiKey)
|
||||
{
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
}
|
@@ -456,12 +456,20 @@ public class RepoUsageComponentImpl implements RepoUsageComponent
}
}

// Check the license expiry
// Check the license expiration
Long licenseExpiryDate = restrictions.getLicenseExpiryDate();
if (licenseExpiryDate != null)
{
// For informational purposes, get the remaining number of days, counting from the beginning of the day of each date (now and expiration date)
int remainingDays = DateUtil.calculateDays(System.currentTimeMillis(), licenseExpiryDate);
if (remainingDays <= 0)
int remainingMills = 0;
if (remainingDays == 0)
{
// Get the exact number of milliseconds between the license expiration time and now to see if it is expired
remainingMills = DateUtil.calculateMs(System.currentTimeMillis(), licenseExpiryDate);
}

if (remainingDays < 0 || remainingMills < 0)
{
errors.add(I18NUtil.getMessage("system.usage.err.limit_license_expired"));
level = RepoUsageLevel.LOCKED_DOWN;
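The reworked check only falls back to a millisecond comparison on the expiry day itself, so a licence that expires later today is no longer reported as already expired. A condensed sketch of the decision; the method names match the diff, but the wrapping helper is illustrative:

```java
// Hypothetical condensed form of the expiry decision shown above.
static boolean isLicenseExpired(long nowMs, long licenseExpiryMs)
{
    int remainingDays = DateUtil.calculateDays(nowMs, licenseExpiryMs);
    if (remainingDays != 0)
    {
        return remainingDays < 0;                        // clearly before or after the expiry day
    }
    // Same calendar day: compare the exact instants instead.
    return DateUtil.calculateMs(nowMs, licenseExpiryMs) < 0;
}
```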
@@ -84,6 +84,7 @@ public class Version2ServiceImpl extends VersionServiceImpl implements VersionSe
private static Log logger = LogFactory.getLog(Version2ServiceImpl.class);

private PermissionService permissionService;
private boolean useVersionAssocIndex = false;

private ExtendedTrait<VersionServiceTrait> versionServiceTrait;

@@ -96,7 +97,23 @@ public class Version2ServiceImpl extends VersionServiceImpl implements VersionSe
{
this.permissionService = permissionService;
}

/**
* Set to use child association index on versions. This helps ordering versions when sequential IDs are not
* guaranteed by the DBMS.
*
* @param useVersionAssocIndex
*/
public void setUseVersionAssocIndex(boolean useVersionAssocIndex)
{
this.useVersionAssocIndex = useVersionAssocIndex;
}

public boolean isUseVersionAssocIndex()
{
return useVersionAssocIndex;
}

/**
* Initialise method
*/
@@ -506,9 +523,12 @@ public class Version2ServiceImpl extends VersionServiceImpl implements VersionSe
QName.createQName(Version2Model.NAMESPACE_URI, Version2Model.CHILD_VERSIONS+"-"+versionNumber), // TODO - testing - note: all children (of a versioned node) will have the same version number, maybe replace with a version sequence of some sort 001-...00n
sourceTypeRef,
nodeDetails.getProperties());

if (isUseVersionAssocIndex())
{
nodeService.setChildAssociationIndex(childAssocRef, getAllVersions(versionHistoryRef).size());
}
versionNodeRef = childAssocRef.getChildRef();

// NOTE: special ML case - see also MultilingualContentServiceImpl.makeMLContainer
if (sourceTypeRef.equals(ContentModel.TYPE_MULTILINGUAL_CONTAINER))
{
@@ -1,31 +1,32 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.service.cmr.view;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
|
||||
@@ -73,6 +74,8 @@ public interface ExporterContext
|
||||
* @return NodeRef[]
|
||||
*/
|
||||
public NodeRef[] getExportList();
|
||||
|
||||
public Map<Integer, NodeRef[]> getExportMap();
|
||||
|
||||
/**
|
||||
* Gets list of parents for exporting nodes
|
||||
|
@@ -1,28 +1,28 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.service.cmr.view;
|
||||
|
||||
import java.io.OutputStream;
|
||||
@@ -75,5 +75,6 @@ public interface ExporterService
|
||||
*/
|
||||
@Auditable(parameters = {"exporter", "parameters", "progress"})
|
||||
public void exportView(Exporter exporter, ExporterCrawlerParameters parameters, Exporter progress);
|
||||
|
||||
|
||||
public void setExportChunkSize(String exportChunkSize);
|
||||
}
|
||||
|
@@ -322,7 +322,7 @@ public class CombinedConfig
{
combinedTransformers.remove(indexToRemove);
// this may also require the current index i to be changed so we don't skip one.
if (i <= indexToRemove)
if (i >= indexToRemove)
{
i--;
}
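The corrected condition only shifts the loop counter when the removed element sat at or before the current position; removing an element that comes later must not move the cursor. A generic sketch of the pattern (the list and lookup below are illustrative, not the CombinedConfig code):

```java
// Generic remove-while-iterating-by-index pattern with the corrected adjustment.
for (int i = 0; i < items.size(); i++)
{
    int indexToRemove = findDuplicateOf(items.get(i));   // illustrative lookup
    if (indexToRemove >= 0)
    {
        items.remove(indexToRemove);
        if (i >= indexToRemove)
        {
            i--;    // everything from indexToRemove onwards shifted left by one
        }
    }
}
```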
@@ -71,4 +71,34 @@ public class DateUtil
}
return days;
}

/**
* Calculate the number of milliseconds between start and end dates based on the <b>default</b> timezone.
* If the end date is before the start date, the returned value is negative.
*
* @param startMs start date in milliseconds
* @param endMs end date in milliseconds
* @return number of milliseconds between the two dates
*/
public static int calculateMs(long startMs, long endMs)
{
DateTime startDateTime = new DateTime(startMs);
DateTime endDateTime = new DateTime(endMs);

int milliseconds;
if (endDateTime.isBefore(startDateTime))
{
Interval interval = new Interval(endDateTime, startDateTime);
Period period = interval.toPeriod(PeriodType.millis());
milliseconds = 0 - period.getMillis();
}
else
{
Interval interval = new Interval(startDateTime, endDateTime);
Period period = interval.toPeriod(PeriodType.millis());
milliseconds = period.getMillis();
}
return milliseconds;
}

}
@@ -488,6 +488,9 @@
<property name="versionComparatorClass">
<value>${version.store.versionComparatorClass}</value>
</property>
<property name="useVersionAssocIndex">
<value>${version.store.useVersionAssocIndex}</value>
</property>
</bean>

<bean id="versionNodeService" class="org.alfresco.repo.version.Node2ServiceImpl">
@@ -38,9 +38,10 @@
|
||||
<property name="dictionaryService" ref="dictionaryService"/>
|
||||
<property name="descriptorService" ref="descriptorComponent"/>
|
||||
<property name="eventFilterRegistry" ref="event2FilterRegistry"/>
|
||||
<property name="event2MessageProducer" ref="event2MessageProducer"/>
|
||||
<property name="transactionService" ref="transactionService"/>
|
||||
<property name="personService" ref="personService"/>
|
||||
<property name="nodeResourceHelper" ref="nodeResourceHelper"/>
|
||||
<property name="eventGeneratorQueue" ref="eventGeneratorQueue"/>
|
||||
</bean>
|
||||
|
||||
<bean id="baseNodeResourceHelper" abstract="true">
|
||||
@@ -54,7 +55,45 @@
|
||||
|
||||
<bean id="nodeResourceHelper" class="org.alfresco.repo.event2.NodeResourceHelper" parent="baseNodeResourceHelper"/>
|
||||
|
||||
<bean id="eventGeneratorV2" class="org.alfresco.repo.event2.EventGenerator" parent="baseEventGeneratorV2">
|
||||
<property name="nodeResourceHelper" ref="nodeResourceHelper"/>
|
||||
<bean id="eventGeneratorV2" class="org.alfresco.repo.event2.EventGenerator" parent="baseEventGeneratorV2"/>
|
||||
|
||||
<bean id="eventGeneratorQueue" class="org.alfresco.repo.event2.EventGeneratorQueue" >
|
||||
<property name="enqueueThreadPoolExecutor">
|
||||
<ref bean="eventAsyncEnqueueThreadPool" />
|
||||
</property>
|
||||
<property name="dequeueThreadPoolExecutor">
|
||||
<ref bean="eventAsyncDequeueThreadPool" />
|
||||
</property>
|
||||
<property name="event2MessageProducer" ref="event2MessageProducer"/>
|
||||
</bean>
|
||||
|
||||
<bean id="eventAsyncEnqueueThreadPool" class="org.alfresco.util.ThreadPoolExecutorFactoryBean">
|
||||
<property name="poolName">
|
||||
<value>eventAsyncEnqueueThreadPool</value>
|
||||
</property>
|
||||
<property name="corePoolSize">
|
||||
<value>${repo.event2.queue.enqueueThreadPool.coreSize}</value>
|
||||
</property>
|
||||
<property name="maximumPoolSize">
|
||||
<value>${repo.event2.queue.enqueueThreadPool.maximumSize}</value>
|
||||
</property>
|
||||
<property name="threadPriority">
|
||||
<value>${repo.event2.queue.enqueueThreadPool.priority}</value>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="eventAsyncDequeueThreadPool" class="org.alfresco.util.ThreadPoolExecutorFactoryBean">
|
||||
<property name="poolName">
|
||||
<value>eventAsyncDequeueThreadPool</value>
|
||||
</property>
|
||||
<property name="corePoolSize">
|
||||
<value>${repo.event2.queue.dequeueThreadPool.coreSize}</value>
|
||||
</property>
|
||||
<property name="maximumPoolSize">
|
||||
<value>${repo.event2.queue.dequeueThreadPool.maximumSize}</value>
|
||||
</property>
|
||||
<property name="threadPriority">
|
||||
<value>${repo.event2.queue.dequeueThreadPool.priority}</value>
|
||||
</property>
|
||||
</bean>
|
||||
</beans>
|
||||
|
@@ -180,7 +180,8 @@
</choose>
</foreach>
</if>
</if>
</sql>
</if>
<if test="limit != 0">limit #{offset}, #{limit}</if>
</sql>

</mapper>
@@ -779,6 +779,7 @@
|
||||
<if test="idTwo != null"><![CDATA[and na.node_id < #{idTwo}]]></if>
|
||||
and na.qname_id in
|
||||
<foreach item="item" index="i" collection="ids" open="(" separator="," close=")">#{item}</foreach>
|
||||
<if test="ordered == true">order by node.id ASC</if>
|
||||
</select>
|
||||
|
||||
<!-- Common results for result_NodeAssoc -->
|
||||
|
@@ -130,6 +130,9 @@
|
||||
<property name="permissionService">
|
||||
<ref bean="PermissionService" />
|
||||
</property>
|
||||
<property name="exportChunkSize">
|
||||
<value>${rm.export.chunk.size}</value>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="repositoryExporterComponent" class="org.alfresco.repo.exporter.RepositoryExporterComponent">
|
||||
|
@@ -266,4 +266,4 @@
<property name="registry" ref="auditModel.extractorRegistry" />
<property name="cmisConnector" ref="CMISConnector" />
</bean>
</beans>
</beans>
@@ -117,6 +117,7 @@
|
||||
<property name="nodeDAO" ref="nodeDAO"/>
|
||||
<property name="maxItemBatchSize" value="${system.fixedACLsUpdater.maxItemBatchSize}"/>
|
||||
<property name="numThreads" value="${system.fixedACLsUpdater.numThreads}"/>
|
||||
<property name="forceSharedACL" value="${system.fixedACLsUpdater.forceSharedACL}"/>
|
||||
<property name="lockTimeToLive" value="${system.fixedACLsUpdater.lockTTL}"/>
|
||||
<property name="policyComponent" ref="policyComponent"/>
|
||||
<property name="policyIgnoreUtil" ref="policyIgnoreUtil"/>
|
||||
|
@@ -178,14 +178,6 @@
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="htmlRenderingEngine"
|
||||
class="org.alfresco.repo.rendition.executer.HTMLRenderingEngine"
|
||||
parent="baseRenderingAction">
|
||||
<property name="tikaConfig">
|
||||
<ref bean="tikaConfig"/>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="compositeRenderingEngine"
|
||||
class="org.alfresco.repo.rendition.executer.CompositeRenderingEngine"
|
||||
parent="baseRenderingAction">
|
||||
|
@@ -9,7 +9,9 @@
|
||||
{"name": "allowEnlargement", "value": true},
|
||||
{"name": "maintainAspectRatio", "value": true},
|
||||
{"name": "autoOrient", "value": true},
|
||||
{"name": "thumbnail", "value": true}
|
||||
{"name": "thumbnail", "value": true},
|
||||
{"name": "startPage", "value": "0"},
|
||||
{"name": "endPage", "value": "0"}
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -21,7 +23,9 @@
|
||||
{"name": "allowEnlargement", "value": false},
|
||||
{"name": "maintainAspectRatio", "value": true},
|
||||
{"name": "autoOrient", "value": true},
|
||||
{"name": "thumbnail", "value": true}
|
||||
{"name": "thumbnail", "value": true},
|
||||
{"name": "startPage", "value": "0"},
|
||||
{"name": "endPage", "value": "0"}
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -33,7 +37,9 @@
|
||||
{"name": "allowEnlargement", "value": false},
|
||||
{"name": "maintainAspectRatio", "value": true},
|
||||
{"name": "autoOrient", "value": true},
|
||||
{"name": "thumbnail", "value": true}
|
||||
{"name": "thumbnail", "value": true},
|
||||
{"name": "startPage", "value": "0"},
|
||||
{"name": "endPage", "value": "0"}
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -45,7 +51,9 @@
|
||||
{"name": "allowEnlargement", "value": false},
|
||||
{"name": "maintainAspectRatio", "value": true},
|
||||
{"name": "autoOrient", "value": true},
|
||||
{"name": "thumbnail", "value": true}
|
||||
{"name": "thumbnail", "value": true},
|
||||
{"name": "startPage", "value": "0"},
|
||||
{"name": "endPage", "value": "0"}
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -57,7 +65,9 @@
|
||||
{"name": "allowEnlargement", "value": false},
|
||||
{"name": "maintainAspectRatio", "value": true},
|
||||
{"name": "autoOrient", "value": true},
|
||||
{"name": "thumbnail", "value": true}
|
||||
{"name": "thumbnail", "value": true},
|
||||
{"name": "startPage", "value": "0"},
|
||||
{"name": "endPage", "value": "0"}
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@@ -3,7 +3,7 @@
repository.name=Main Repository

# Schema number
version.schema=14002
version.schema=14100

# Directory configuration

@@ -153,6 +153,7 @@ system.cache.parentAssocs.limitFactor=8
system.acl.maxPermissionCheckTimeMillis=10000
# The maximum number of search results to perform permission checks against
system.acl.maxPermissionChecks=1000
system.acl.maxPermissionCheckEnabled=false

# The maximum number of filefolder list results
system.filefolderservice.defaultListMaxResults=5000
@@ -377,6 +378,14 @@ version.store.version2Store=workspace://version2Store
# if upgrading from a version that used unordered sequences in a cluster.
version.store.versionComparatorClass=

# Optional to set the child association index when creating a new version.
# This helps ordering versions when sequential IDs are not guaranteed by the DBMS.
# Not compatible with AGS < 7.1.1
# Once enabled, it should not be disabled again or new versions will go back
# to have index -1 and you will get the wrong order in version history.
# Please, see MNT-22715 for details.
version.store.useVersionAssocIndex=false

# Folders for storing people
system.system_container.childname=sys:system
system.people_container.childname=sys:people
@@ -480,7 +489,7 @@ system.thumbnail.definition.default.timeoutMs=-1
system.thumbnail.definition.default.readLimitTimeMs=-1
system.thumbnail.definition.default.maxSourceSizeKBytes=-1
system.thumbnail.definition.default.readLimitKBytes=-1
system.thumbnail.definition.default.pageLimit=1
system.thumbnail.definition.default.pageLimit=-1
system.thumbnail.definition.default.maxPages=-1

# Max mimetype sizes to create thumbnail icons
@@ -1082,6 +1091,8 @@ system.fixedACLsUpdater.lockTTL=10000
system.fixedACLsUpdater.maxItemBatchSize=100
# fixedACLsUpdater - the number of threads to use
system.fixedACLsUpdater.numThreads=4
# fixedACLsUpdater - Force shared ACL to propagate through children even if there is an unexpected ACL
system.fixedACLsUpdater.forceSharedACL=false
# fixedACLsUpdater cron expression - fire at midnight every day
system.fixedACLsUpdater.cronExpression=0 0 0 * * ?

@@ -1207,6 +1218,15 @@ repo.event2.filter.childAssocTypes=rn:rendition
repo.event2.filter.users=System, null
# Topic name
repo.event2.topic.endpoint=amqp:topic:alfresco.repo.event2
# Thread pool for async enqueue of repo events
repo.event2.queue.enqueueThreadPool.priority=1
repo.event2.queue.enqueueThreadPool.coreSize=8
repo.event2.queue.enqueueThreadPool.maximumSize=10
# Thread pool for async dequeue and delivery of repo events
repo.event2.queue.dequeueThreadPool.priority=1
repo.event2.queue.dequeueThreadPool.coreSize=1
repo.event2.queue.dequeueThreadPool.maximumSize=1

# MNT-21083
# --DELETE_NOT_EXISTS - default settings
@@ -1225,4 +1245,19 @@ system.new-node-transaction-indexes.ignored=true

# Allows the configuration of maximum limits of the temp files to be deleted or the maximum time allowed to run for the job
system.tempFileCleaner.maxFilesToDelete=
system.tempFileCleaner.maxTimeToRun=
system.tempFileCleaner.maxTimeToRun=

# Rhino optimization level
scripts.execution.optimizationLevel=0

# Max seconds a script is allowed to run
scripts.execution.maxScriptExecutionSeconds=-1

# Max call stack depth
scripts.execution.maxStackDepth=-1

# Max memory (bytes) a script can use
scripts.execution.maxMemoryUsedInBytes=-1

# Number of instructions that will trigger the observer
scripts.execution.observerInstructionCount=-1
|
@@ -45,6 +45,21 @@
|
||||
<property name="storePath">
|
||||
<value>${spaces.company_home.childname}</value>
|
||||
</property>
|
||||
<property name="optimizationLevel">
|
||||
<value>${scripts.execution.optimizationLevel}</value>
|
||||
</property>
|
||||
<property name="maxScriptExecutionSeconds">
|
||||
<value>${scripts.execution.maxScriptExecutionSeconds}</value>
|
||||
</property>
|
||||
<property name="maxStackDepth">
|
||||
<value>${scripts.execution.maxStackDepth}</value>
|
||||
</property>
|
||||
<property name="maxMemoryUsedInBytes">
|
||||
<value>${scripts.execution.maxMemoryUsedInBytes}</value>
|
||||
</property>
|
||||
<property name="observerInstructionCount">
|
||||
<value>${scripts.execution.observerInstructionCount}</value>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!-- base config implementation that script extension beans extend from - for auto registration
|
||||
|
@@ -105,8 +105,35 @@
|
||||
<ref bean="metadataQueryIndexesCheck2" />
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="search.dbQueryEngineImpl" class="org.alfresco.repo.search.impl.querymodel.impl.db.DBQueryEngine" >
|
||||
|
||||
<bean id="search.dbQueryEngineImpl" class="org.alfresco.util.bean.HierarchicalBeanLoader">
|
||||
<property name="targetBeanName">
|
||||
<value>search.dbQueryEngineImpl.#bean.dialect#</value>
|
||||
</property>
|
||||
<property name="targetClass">
|
||||
<value>org.alfresco.repo.search.impl.querymodel.QueryEngine</value>
|
||||
</property>
|
||||
<property name="dialectBaseClass">
|
||||
<value>org.alfresco.repo.domain.dialect.Dialect</value>
|
||||
</property>
|
||||
<property name="dialectClass">
|
||||
<bean class="org.springframework.beans.factory.config.PropertyPathFactoryBean">
|
||||
<property name="targetBeanName" value="dialect" />
|
||||
<property name="propertyPath" value="class.name" />
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="search.dbQueryEngineImpl.org.alfresco.repo.domain.dialect.Dialect"
|
||||
parent="search.baseDbQueryEngineImpl">
|
||||
<property name="usePagingQuery" value="false"/>
|
||||
</bean>
|
||||
<bean id="search.dbQueryEngineImpl.org.alfresco.repo.domain.dialect.MySQLInnoDBDialect"
|
||||
parent="search.baseDbQueryEngineImpl">
|
||||
<property name="usePagingQuery" value="true"/>
|
||||
</bean>
|
||||
|
||||
<bean id="search.baseDbQueryEngineImpl" class="org.alfresco.repo.search.impl.querymodel.impl.db.DBQueryEngine" abstract="true">
|
||||
<property name="permissionService" ref="permissionService"/>
|
||||
<property name="dictionaryService" ref="dictionaryService" />
|
||||
<property name="namespaceService" ref="namespaceService" />
|
||||
@@ -126,6 +153,9 @@
|
||||
<property name="maxPermissionCheckTimeMillis">
|
||||
<value>${system.acl.maxPermissionCheckTimeMillis}</value>
|
||||
</property>
|
||||
<property name="maxPermissionCheckEnabled">
|
||||
<value>${system.acl.maxPermissionCheckEnabled}</value>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="search.dbQueryEngine" class="org.springframework.aop.framework.ProxyFactoryBean">
|
||||
|
@@ -165,6 +165,8 @@
|
||||
<property name="keyResourceLoader" ref="springKeyResourceLoader"/>
|
||||
<property name="keyStoreParameters" ref="keyStoreParameters"/>
|
||||
<property name="encryptionParameters" ref="md5EncryptionParameters"/>
|
||||
<property name="sharedSecret" value="${solr.sharedSecret}"/>
|
||||
<property name="sharedSecretHeader" value="${solr.sharedSecret.header}"/>
|
||||
<property name="host" value="${solr.host}"/>
|
||||
<property name="port" value="${solr.port}"/>
|
||||
<property name="sslPort" value="${solr.port.ssl}"/>
|
||||
|
@@ -117,31 +117,6 @@
|
||||
"imageMagickOptions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "htmlToPdfViaOdt",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "libreoffice", "targetMediaType": "application/vnd.oasis.opendocument.text"},
|
||||
{"transformerName": "libreoffice"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "text/html", "targetMediaType": "application/pdf" }
|
||||
],
|
||||
"transformOptions": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "htmlToImageViaPdf",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "htmlToPdfViaOdt", "targetMediaType": "application/pdf"},
|
||||
{"transformerName": "pdfToImageViaPng"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
],
|
||||
"transformOptions": [
|
||||
"pdfRendererOptions",
|
||||
"imageMagickOptions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "ooXmlToImageViaText",
|
||||
"transformerPipeline" : [
|
||||
@@ -198,42 +173,66 @@
|
||||
"archiveOptions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "libreofficeHtmlToPdfViaOdt",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "libreoffice", "targetMediaType": "application/vnd.oasis.opendocument.text"},
|
||||
{"transformerName": "libreoffice"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "text/html", "targetMediaType": "application/pdf" }
|
||||
],
|
||||
"transformOptions": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "libreofficeToPdf",
|
||||
"transformerFailover" : [ "libreoffice", "libreofficeHtmlToPdfViaOdt" ],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "text/tab-separated-values", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.visio2013", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/wordperfect", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "priority": 150, "targetMediaType": "application/pdf" },
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "priority": 150, "targetMediaType": "application/pdf" }
|
||||
],
|
||||
"transformOptions": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "libreofficeToPdfBoxViaPdf",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "libreofficeToPdf", "targetMediaType": "application/pdf"},
|
||||
{"transformerName": "libreoffice", "targetMediaType": "application/pdf"},
|
||||
{"transformerName": "PdfBox"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.oasis.opendocument.graphics", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc.template", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress.template", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.writer.template", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "text/tab-separated-values", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "text/tab-separated-values", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "text/tab-separated-values", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "text/tab-separated-values", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "text/tab-separated-values", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.visio2013", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.visio2013", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.visio2013", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.visio2013", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.visio2013", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/wordperfect", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/wordperfect", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/wordperfect", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/wordperfect", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/wordperfect", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.calc", "priority": 150, "targetMediaType": "text/xml"},
|
||||
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "priority": 150, "targetMediaType": "text/csv"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "priority": 150, "targetMediaType": "text/html"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "maxSourceSizeBytes": 26214400, "priority": 150, "targetMediaType": "text/plain"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "priority": 150, "targetMediaType": "application/xhtml+xml"},
|
||||
{"sourceMediaType": "application/vnd.sun.xml.impress", "priority": 150, "targetMediaType": "text/xml"}
|
||||
],
|
||||
"transformOptions": [
|
||||
"pdfboxOptions"
|
||||
@@ -263,6 +262,32 @@
|
||||
"transformOptions": [
|
||||
"tikaOptions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "htmlToPdfViaTXT",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "string", "targetMediaType": "text/plain"},
|
||||
{"transformerName": "libreoffice"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "text/html", "targetMediaType": "application/pdf" }
|
||||
],
|
||||
"transformOptions": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"transformerName": "htmlToImageViaTXT",
|
||||
"transformerPipeline" : [
|
||||
{"transformerName": "string", "targetMediaType": "text/plain"},
|
||||
{"transformerName": "textToImageViaPdf"}
|
||||
],
|
||||
"supportedSourceAndTargetList": [
|
||||
{"sourceMediaType": "text/html", "targetMediaType": "image/png" }
|
||||
],
|
||||
"transformOptions": [
|
||||
"pdfRendererOptions",
|
||||
"imageMagickOptions"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2017 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -77,12 +77,17 @@ import org.junit.runners.Suite;
|
||||
// From MiscContextTestSuite
|
||||
org.alfresco.repo.domain.query.CannedQueryDAOTest.class,
|
||||
|
||||
// REPO-2963 : Tests causing a cascade of failures in AllDBTestsTestSuite on PostgreSQL/MySQL
|
||||
// ACS-1907
|
||||
org.alfresco.repo.search.impl.querymodel.impl.db.ACS1907Test.class,
|
||||
|
||||
// REPO-2963 : Tests causing a cascade of failures in AllDBTestsTestSuite on PostgreSQL/MySQL
|
||||
// Moved at the bottom of the suite because DbNodeServiceImplTest.testNodeCleanupRegistry() takes a long time on a clean DB.
|
||||
org.alfresco.repo.node.db.DbNodeServiceImplTest.class,
|
||||
|
||||
org.alfresco.repo.node.cleanup.TransactionCleanupTest.class,
|
||||
org.alfresco.repo.security.person.GetPeopleCannedQueryTest.class
|
||||
org.alfresco.repo.security.person.GetPeopleCannedQueryTest.class,
|
||||
|
||||
org.alfresco.repo.domain.schema.script.DeleteNotExistsExecutorTest.class
|
||||
})
|
||||
public class AllDBTestsTestSuite
|
||||
{
|
||||
|
@@ -75,7 +75,6 @@ import org.junit.runners.Suite;
|
||||
org.alfresco.repo.transfer.HttpClientTransmitterImplTest.class,
|
||||
org.alfresco.repo.transfer.manifest.TransferManifestTest.class,
|
||||
org.alfresco.repo.transfer.TransferVersionCheckerImplTest.class,
|
||||
org.alfresco.repo.urlshortening.BitlyUrlShortenerTest.class,
|
||||
org.alfresco.service.cmr.calendar.CalendarRecurrenceHelperTest.class,
|
||||
org.alfresco.service.cmr.calendar.CalendarTimezoneHelperTest.class,
|
||||
org.alfresco.tools.RenameUserTest.class,
|
||||
@@ -110,15 +109,17 @@ import org.junit.runners.Suite;
|
||||
org.alfresco.util.schemacomp.validator.NameValidatorTest.class,
|
||||
org.alfresco.util.schemacomp.validator.SchemaVersionValidatorTest.class,
|
||||
org.alfresco.util.schemacomp.validator.TypeNameOnlyValidatorTest.class,
|
||||
org.alfresco.util.test.junitrules.RetryAtMostRuleTest.class,
|
||||
org.alfresco.util.test.junitrules.TemporaryMockOverrideTest.class,
|
||||
org.alfresco.repo.search.impl.solr.AbstractSolrQueryHTTPClientTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SpellCheckDecisionManagerTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SolrStoreMappingWrapperTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SolrQueryHTTPClientTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SolrSQLHttpClientTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SolrStatsResultTest.class,
|
||||
org.alfresco.repo.search.impl.solr.facet.SolrFacetComparatorTest.class,
|
||||
org.alfresco.repo.search.impl.solr.facet.FacetQNameUtilsTest.class,
|
||||
org.alfresco.util.BeanExtenderUnitTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SpellCheckDecisionManagerTest.class,
|
||||
org.alfresco.repo.search.impl.solr.SolrStoreMappingWrapperTest.class,
|
||||
org.alfresco.repo.security.authentication.CompositePasswordEncoderTest.class,
|
||||
org.alfresco.repo.security.authentication.PasswordHashingTest.class,
|
||||
org.alfresco.traitextender.TraitExtenderIntegrationTest.class,
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2017 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -39,7 +39,7 @@ import org.junit.runners.Suite;
|
||||
@RunWith(Categories.class)
|
||||
@Categories.ExcludeCategory({DBTests.class, NonBuildTests.class})
|
||||
@Suite.SuiteClasses({
|
||||
|
||||
|
||||
// there is a test that runs for 184s and another one that runs for 40s
|
||||
org.alfresco.repo.attributes.AttributeServiceTest.class,
|
||||
|
||||
@@ -66,7 +66,9 @@ import org.junit.runners.Suite;
|
||||
org.alfresco.repo.content.RoutingContentStoreTest.class,
|
||||
|
||||
org.alfresco.encryption.EncryptionTests.class,
|
||||
org.alfresco.encryption.KeyStoreTests.class
|
||||
org.alfresco.encryption.KeyStoreTests.class,
|
||||
|
||||
org.alfresco.repo.content.MimetypeMapContentTest.class
|
||||
|
||||
// TODO REPO-2791 org.alfresco.repo.content.routing.StoreSelectorAspectContentStoreTest.class,
|
||||
})
|
||||
|
@@ -56,7 +56,6 @@ import org.junit.runners.Suite;
|
||||
// This test opens, closes and again opens the alfresco application context.
|
||||
org.alfresco.repo.dictionary.CustomModelRepoRestartTest.class,
|
||||
|
||||
org.alfresco.repo.rendition.executer.HTMLRenderingEngineTest.class,
|
||||
org.alfresco.repo.rendition.executer.XSLTFunctionsTest.class,
|
||||
org.alfresco.repo.rendition.executer.XSLTRenderingEngineTest.class,
|
||||
org.alfresco.repo.replication.ReplicationServiceIntegrationTest.class,
|
||||
|
@@ -75,7 +75,6 @@ import org.junit.runners.Suite;
|
||||
org.alfresco.repo.site.SiteServiceImplTest.class,
|
||||
|
||||
// [classpath:alfresco/application-context.xml, classpath:scriptexec/script-exec-test.xml]
|
||||
org.alfresco.repo.domain.schema.script.DeleteNotExistsExecutorTest.class,
|
||||
org.alfresco.repo.domain.schema.script.ScriptExecutorImplIntegrationTest.class,
|
||||
org.alfresco.repo.domain.schema.script.ScriptBundleExecutorImplIntegrationTest.class,
|
||||
|
||||
|
@@ -78,6 +78,7 @@ import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationContext;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.site.SiteModel;
import org.alfresco.repo.tenant.TenantAdminService;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.repo.tenant.TenantUtil;
@@ -107,7 +108,13 @@ import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.cmr.security.MutableAuthenticationService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.security.PersonService.PersonInfo;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.cmr.site.SiteVisibility;
import org.alfresco.service.cmr.tagging.TaggingService;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.service.cmr.version.VersionService;
@@ -121,7 +128,6 @@ import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.Pair;
import org.alfresco.util.testing.category.FrequentlyFailingTests;
import org.alfresco.util.testing.category.LuceneTests;
import org.alfresco.util.testing.category.PerformanceTests;
import org.alfresco.util.testing.category.RedundantTests;
import org.apache.chemistry.opencmis.commons.PropertyIds;
import org.apache.chemistry.opencmis.commons.data.Ace;
@@ -213,6 +219,9 @@ public class CMISTest
    private SearchService searchService;
    private java.util.Properties globalProperties;
    private AuditComponentImpl auditComponent;
    private PersonService personService;
    private SiteService siteService;
    private MutableAuthenticationService authenticationService;

    private AlfrescoCmisServiceFactory factory;

@@ -338,6 +347,9 @@ public class CMISTest
        this.tenantService = (TenantService) ctx.getBean("tenantService");
        this.searchService = (SearchService) ctx.getBean("SearchService");
        this.auditComponent = (AuditComponentImpl) ctx.getBean("auditComponent");
        this.personService = (PersonService) ctx.getBean("personService");
        this.siteService = (SiteService) ctx.getBean("siteService");
        this.authenticationService = (MutableAuthenticationService) ctx.getBean("AuthenticationService");

        this.globalProperties = (java.util.Properties) ctx.getBean("global-properties");
        this.globalProperties.setProperty(VersionableAspectTest.AUTO_VERSION_PROPS_KEY, "true");
@@ -719,12 +731,17 @@ public class CMISTest
    }

    private <T extends Object> T withCmisService(CmisServiceCallback<T> callback, CmisVersion cmisVersion)
    {
        return withCmisService("admin", "admin", callback, cmisVersion);
    }

    private <T extends Object> T withCmisService(String username, String password, CmisServiceCallback<T> callback, CmisVersion cmisVersion)
    {
        CmisService cmisService = null;

        try
        {
            CallContext context = new SimpleCallContext("admin", "admin", cmisVersion);
            CallContext context = new SimpleCallContext(username, password, cmisVersion);
            cmisService = factory.getService(context);
            T ret = callback.execute(cmisService);
            return ret;
@@ -4101,6 +4118,108 @@ public class CMISTest
        }
    }

    /**
     * This test ensures that a user who is not a member of a private site can edit metadata on a
     * document placed in that site, provided the user has been granted the "SiteCollaborator" role
     * on the document.
     *
     * @throws Exception
     */
    @Test
    public void testMNT20006() throws Exception
    {
        AuthenticationUtil.pushAuthentication();
        AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());

        final String nonMemberUsername = "user" + System.currentTimeMillis();
        final String nonMemberPassword = "pass" + System.currentTimeMillis();
        final String siteId = "site" + System.currentTimeMillis();
        final String originalDescription = "my description";

        NodeRef fileNode;

        try
        {
            fileNode = transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<NodeRef>()
            {
                public NodeRef execute() throws Throwable
                {
                    // Create user
                    authenticationService.createAuthentication(nonMemberUsername, nonMemberPassword.toCharArray());
                    Map<QName, Serializable> props = new HashMap<QName, Serializable>();
                    String email = nonMemberUsername + "@testcmis.com";
                    props.put(ContentModel.PROP_USERNAME, nonMemberUsername);
                    props.put(ContentModel.PROP_FIRSTNAME, nonMemberUsername);
                    props.put(ContentModel.PROP_LASTNAME, nonMemberUsername);
                    props.put(ContentModel.PROP_EMAIL, email);
                    PersonInfo personInfo = personService.getPerson(personService.createPerson(props));
                    assertNotNull("Null person info", personInfo);

                    // Create site
                    SiteInfo siteInfo = siteService.createSite("myPreset", siteId, "myTitle", "myDescription", SiteVisibility.PRIVATE);
                    assertNotNull("Null site info", siteInfo);
                    NodeRef siteDocLib = siteService.createContainer(siteId, SiteService.DOCUMENT_LIBRARY, ContentModel.TYPE_FOLDER, null);
                    assertNotNull("Null site doclib", siteDocLib);

                    // Create node in site
                    String nodeName = "node" + System.currentTimeMillis() + ".txt";
                    NodeRef fileNode = nodeService.createNode(siteDocLib, ContentModel.ASSOC_CONTAINS, ContentModel.ASSOC_CONTAINS, ContentModel.TYPE_CONTENT).getChildRef();
                    ContentWriter writer = contentService.getWriter(fileNode, ContentModel.PROP_CONTENT, true);
                    writer.putContent("my node content");
                    nodeService.setProperty(fileNode, ContentModel.PROP_TITLE, nodeName);
                    nodeService.setProperty(fileNode, ContentModel.PROP_DESCRIPTION, originalDescription);
                    assertNotNull("Null file node", fileNode);
                    assertTrue(nodeService.exists(fileNode));

                    // Grant the SiteCollaborator permission on the node to the user who is not a member of the site
                    permissionService.setPermission(fileNode, nonMemberUsername, SiteModel.SITE_COLLABORATOR, true);

                    return fileNode;
                }
            });
        }
        finally
        {
            AuthenticationUtil.popAuthentication();
        }

        // Edit metadata
        final String newDescription = "new node description";

        Boolean updated = withCmisService(nonMemberUsername, nonMemberPassword, new CmisServiceCallback<Boolean>()
        {
            @Override
            public Boolean execute(CmisService cmisService)
            {
                Boolean updated = true;

                try
                {
                    // Obtain repository id
                    List<RepositoryInfo> repositories = cmisService.getRepositoryInfos(null);
                    assertTrue(repositories.size() > 0);
                    RepositoryInfo repo = repositories.get(0);
                    String repositoryId = repo.getId();

                    // Id holder
                    Holder<String> objectIdHolder = new Holder<String>(fileNode.toString());

                    // New Properties
                    PropertiesImpl newProperties = new PropertiesImpl();
                    newProperties.addProperty(new PropertyStringImpl(PropertyIds.DESCRIPTION, newDescription));
                    cmisService.updateProperties(repositoryId, objectIdHolder, null, newProperties, null);
                }
                catch (Exception e)
                {
                    updated = false;
                }

                return updated;
            };
        }, CmisVersion.CMIS_1_1);

        assertTrue("Document metadata not updated", updated);
    }

    private NodeRef createFolder(NodeRef parentNodeRef, String folderName, QName folderType) throws IOException
    {
        Map<QName, Serializable> properties = new HashMap<QName, Serializable>();

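As a companion to the server-side test above, the sketch below shows roughly the same metadata update performed from a client through the OpenCMIS client API. It is illustrative only and not part of this change: the AtomPub URL, credentials and object id are placeholder assumptions for a locally running repository.

import java.util.HashMap;
import java.util.Map;

import org.apache.chemistry.opencmis.client.api.CmisObject;
import org.apache.chemistry.opencmis.client.api.Session;
import org.apache.chemistry.opencmis.client.runtime.SessionFactoryImpl;
import org.apache.chemistry.opencmis.commons.PropertyIds;
import org.apache.chemistry.opencmis.commons.SessionParameter;
import org.apache.chemistry.opencmis.commons.enums.BindingType;

public class UpdateDescriptionExample
{
    public static void main(String[] args)
    {
        // Placeholder connection details for a local repository.
        Map<String, String> parameters = new HashMap<>();
        parameters.put(SessionParameter.USER, "collaboratorUser");
        parameters.put(SessionParameter.PASSWORD, "collaboratorPassword");
        parameters.put(SessionParameter.ATOMPUB_URL,
                "http://localhost:8080/alfresco/api/-default-/public/cmis/versions/1.1/atom");
        parameters.put(SessionParameter.BINDING_TYPE, BindingType.ATOMPUB.value());

        // Use the first repository advertised by the server and open a session as that user.
        Session session = SessionFactoryImpl.newInstance().getRepositories(parameters).get(0).createSession();

        // Look the document up by its CMIS object id (placeholder) and change cmis:description,
        // which is the property the test above updates via cmisService.updateProperties.
        CmisObject document = session.getObject("a-cmis-object-id");
        Map<String, Object> update = new HashMap<>();
        update.put(PropertyIds.DESCRIPTION, "new node description");
        document.updateProperties(update);
    }
}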
@@ -26,8 +26,8 @@

package org.alfresco.repo.action;

import static java.lang.Thread.sleep;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

@@ -39,6 +39,7 @@ import java.util.List;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.executer.ActionExecuter;
import org.alfresco.repo.action.executer.ContentMetadataExtracter;
import org.alfresco.repo.action.executer.CounterIncrementActionExecuter;
import org.alfresco.repo.action.executer.ScriptActionExecuter;
@@ -259,7 +260,7 @@ public class ActionServiceImpl2Test
    public void testExecuteScript() throws Exception
    {
        final NodeRef scriptToBeExecuted = addTempScript("changeFileNameTest.js",
                "document.properties.name = \"Changed\" + \"_\" + document.properties.name;\ndocument.save();");
                "document.properties.name = \"Changed_\" + document.properties.name;\ndocument.save();");
        assertNotNull("Failed to add the test script.", scriptToBeExecuted);

        // add a test file to the Site in order to change its name
@@ -310,6 +311,73 @@ public class ActionServiceImpl2Test
                return null;
            }
        });

        // Execute a script that is not in Data Dictionary > Scripts
        AuthenticationUtil.setFullyAuthenticatedUser(testSiteAndMemberInfo.siteManager);
        NodeRef companyHomeRef = wellKnownNodes.getCompanyHome();
        NodeRef sharedFolderRef = nodeService.getChildByName(companyHomeRef, ContentModel.ASSOC_CONTAINS,
                "Shared");
        final NodeRef invalidScriptRef = addTempScript("changeFileNameTest.js",
                "document.properties.name = \"Invalid_Change.pdf\";\ndocument.save();", sharedFolderRef);
        assertNotNull("Failed to add the test script.", invalidScriptRef);
        transactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            public Void execute() throws Throwable
            {
                // Create the action
                Action action = actionService.createAction(ScriptActionExecuter.NAME);
                action.setParameterValue(ScriptActionExecuter.PARAM_SCRIPTREF, invalidScriptRef);

                try
                {
                    // Execute the action
                    actionService.executeAction(action, testNode);
                }
                catch (Throwable th)
                {
                    // do nothing
                }
                assertFalse("Scripts outside of Data Dictionary Scripts folder should not be executed",
                        ("Invalid_Change.pdf".equals(nodeService.getProperty(testNode, ContentModel.PROP_NAME))));

                return null;
            }
        });
    }

    @Test
    public void testActionResult() throws Exception
    {
        AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
        transactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            public Void execute() throws Throwable
            {
                try
                {
                    // Create the script node reference
                    NodeRef script = addTempScript("test-action-result-script.js", "\"VALUE\";");

                    // Create the action
                    Action action = actionService.createAction(ScriptActionExecuter.NAME);
                    action.setParameterValue(ScriptActionExecuter.PARAM_SCRIPTREF, script);

                    // Execute the action
                    actionService.executeAction(action, testNode);

                    // Get the result
                    String result = (String) action.getParameterValue(ActionExecuter.PARAM_RESULT);
                    assertNotNull(result);
                    assertEquals("VALUE", result);
                }
                finally
                {
                    AuthenticationUtil.clearCurrentSecurityContext();
                }

                return null;
            }
        });
    }

    @Test
@@ -369,6 +437,32 @@ public class ActionServiceImpl2Test
        });
    }

    private NodeRef addTempScript(final String scriptFileName, final String javaScript, final NodeRef parentRef)
    {
        AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
        return transactionHelper.doInTransaction(new RetryingTransactionCallback<NodeRef>()
        {
            public NodeRef execute() throws Throwable
            {
                // Create the script node reference
                NodeRef script = nodeService.createNode(parentRef, ContentModel.ASSOC_CONTAINS,
                        QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, scriptFileName),
                        ContentModel.TYPE_CONTENT).getChildRef();

                nodeService.setProperty(script, ContentModel.PROP_NAME, scriptFileName);

                ContentWriter contentWriter = contentService.getWriter(script, ContentModel.PROP_CONTENT, true);
                contentWriter.setMimetype(MimetypeMap.MIMETYPE_JAVASCRIPT);
                contentWriter.setEncoding("UTF-8");
                contentWriter.putContent(javaScript);

                tempNodes.addNodeRef(script);
                return script;
            }
        });
    }

    private NodeRef addTempScript(final String scriptFileName, final String javaScript)
    {
        AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
@@ -386,20 +480,7 @@ public class ActionServiceImpl2Test
                NodeRef scriptsRef = nodeService.getChildByName(dataDictionaryRef, ContentModel.ASSOC_CONTAINS,
                        "Scripts");

                // Create the script node reference
                NodeRef script = nodeService.createNode(scriptsRef, ContentModel.ASSOC_CONTAINS,
                        QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, scriptFileName),
                        ContentModel.TYPE_CONTENT).getChildRef();

                nodeService.setProperty(script, ContentModel.PROP_NAME, scriptFileName);

                ContentWriter contentWriter = contentService.getWriter(script, ContentModel.PROP_CONTENT, true);
                contentWriter.setMimetype(MimetypeMap.MIMETYPE_JAVASCRIPT);
                contentWriter.setEncoding("UTF-8");
                contentWriter.putContent(javaScript);

                tempNodes.addNodeRef(script);
                return script;
                return addTempScript(scriptFileName, javaScript, scriptsRef);
            }
        });
    }

@@ -805,46 +805,6 @@ public class ActionServiceImplTest extends BaseAlfrescoSpringTest
        assertEquals(action4, savedAction2.getAction(2));
    }

    /**
     * Test the action result parameter
     */
    @Test
    public void testActionResult()
    {
        // We need to run this test as Administrator. The ScriptAction has to run as a full user (instead of as System)
        // so that we can set up the Person object in the ScriptNode
        AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());
        try
        {
            // Create the script node reference
            NodeRef script = this.nodeService.createNode(
                    this.folder,
                    ContentModel.ASSOC_CONTAINS,
                    QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "testScript.js"),
                    ContentModel.TYPE_CONTENT).getChildRef();
            this.nodeService.setProperty(script, ContentModel.PROP_NAME, "testScript.js");
            ContentWriter contentWriter = this.contentService.getWriter(script, ContentModel.PROP_CONTENT, true);
            contentWriter.setMimetype("text/plain");
            contentWriter.setEncoding("UTF-8");
            contentWriter.putContent("\"VALUE\";");

            // Create the action
            Action action1 = this.actionService.createAction(ScriptActionExecuter.NAME);
            action1.setParameterValue(ScriptActionExecuter.PARAM_SCRIPTREF, script);

            // Execute the action
            this.actionService.executeAction(action1, this.nodeRef);

            // Get the result
            String result = (String) action1.getParameterValue(ActionExecuter.PARAM_RESULT);
            assertNotNull(result);
            assertEquals("VALUE", result);
        }
        finally
        {
            AuthenticationUtil.clearCurrentSecurityContext();
        }
    }

    /** ===================================================================================
     * Test asynchronous actions
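The comment in the removed test touches on the difference between running repository code as System and running it as a real user. Where a full user context is needed only for a bounded piece of work, the repository's AuthenticationUtil.runAs helper can be used instead of switching the thread's authentication for the whole test. A minimal sketch follows; the class name and the work performed are illustrative only.

import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;

public class RunAsExample
{
    public String describeCurrentUser()
    {
        // Execute a unit of work as the admin user; the previous authentication
        // is restored automatically once the work completes.
        return AuthenticationUtil.runAs(new RunAsWork<String>()
        {
            @Override
            public String doWork() throws Exception
            {
                return "running as " + AuthenticationUtil.getFullyAuthenticatedUser();
            }
        }, AuthenticationUtil.getAdminUserName());
    }
}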
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -23,125 +23,138 @@
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.content;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;

import junit.framework.TestCase;

import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.DataModelTestApplicationContextHelper;
import org.apache.poi.util.IOUtils;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;

/**
 * Content specific tests for MimeTypeMap
 *
 * @see org.alfresco.repo.content.MimetypeMap
 * @see org.alfresco.repo.content.MimetypeMapTest
 */
@Category(OwnJVMTestsCategory.class)
public class MimetypeMapContentTest extends TestCase
{
    private static ApplicationContext ctx = DataModelTestApplicationContextHelper.getApplicationContext();

    private MimetypeService mimetypeService;

    @Override
    public void setUp() throws Exception
    {
        mimetypeService = (MimetypeService)ctx.getBean("mimetypeService");
    }

    public void testGuessMimetypeForFile() throws Exception
    {
        // Correct ones
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("something.doc", openQuickTestFile("quick.doc"))
        );
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("SOMETHING.DOC", openQuickTestFile("quick.doc"))
        );

        // Incorrect ones, Tika spots the mistake
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("something.pdf", openQuickTestFile("quick.doc"))
        );
        assertEquals(
                "application/pdf",
                mimetypeService.guessMimetype("something.doc", openQuickTestFile("quick.pdf"))
        );

        // Ones where we use a different mimetype to the canonical one
        assertEquals(
                "image/bmp", // Officially image/x-ms-bmp
                mimetypeService.guessMimetype("image.bmp", openQuickTestFile("quick.bmp"))
        );

        // Ones where we know about the parent, and Tika knows about the details
        assertEquals(
                "application/dita+xml", // Full version: application/dita+xml;format=concept
                mimetypeService.guessMimetype("concept.dita", openQuickTestFile("quickConcept.dita"))
        );

        // Alfresco Specific ones, that Tika doesn't know about
        assertEquals(
                "application/acp",
                mimetypeService.guessMimetype("something.acp", openQuickTestFile("quick.acp"))
        );

        // Where the file is corrupted
        File tmp = File.createTempFile("alfresco", ".tmp");
        ContentReader reader = openQuickTestFile("quick.doc");
        InputStream inp = reader.getContentInputStream();
        byte[] trunc = new byte[512+256];
        IOUtils.readFully(inp, trunc);
        inp.close();
        FileOutputStream out = new FileOutputStream(tmp);
        out.write(trunc);
        out.close();
        ContentReader truncReader = new FileContentReader(tmp);

        // Because the file is truncated, Tika won't be able to process the contents
        // of the OLE2 structure
        // So, it'll fall back to just OLE2, but it won't fail
        assertEquals(
                "application/x-tika-msoffice",
                mimetypeService.guessMimetype(null, truncReader)
        );
        // But with the filename it'll be able to use the .doc extension
        // to guess at it being a .Doc file
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("something.doc", truncReader)
        );

        // Lotus notes EML files (ALF-16381 / TIKA-1042)
        assertEquals(
                "message/rfc822",
                mimetypeService.guessMimetype("something.eml", openQuickTestFile("quickLotus.eml"))
        );
    }

    private ContentReader openQuickTestFile(String filename)
    {
        URL url = getClass().getClassLoader().getResource("quick/" + filename);
        if(url == null)
        {
            fail("Quick test file \"" + filename + "\" wasn't found");
        }
        File file = new File(url.getFile());
        return new FileContentReader(file);
    }
}
package org.alfresco.repo.content;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;

import junit.framework.TestCase;

import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.ApplicationContextHelper;
import org.apache.poi.util.IOUtils;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;

/**
 * Content specific tests for MimeTypeMap
 *
 * @see org.alfresco.repo.content.MimetypeMap
 * @see org.alfresco.repo.content.MimetypeMapTest
 */
@Category({OwnJVMTestsCategory.class})
public class MimetypeMapContentTest extends TestCase
{
    private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();

    private MimetypeService mimetypeService;

    @Override
    public void setUp() throws Exception
    {
        mimetypeService = (MimetypeService)ctx.getBean("mimetypeService");
    }

    public void testGuessPdfMimetype() throws Exception
    {
        assertEquals(
                "application/pdf",
                mimetypeService.guessMimetype("something.doc", openQuickTestFile("quick.pdf"))
        );
        assertEquals(
                "application/pdf",
                mimetypeService.guessMimetype(null, openQuickTestFile("quick.pdf"))
        );
    }

    public void testGuessMimetypeForFile() throws Exception
    {
        // Correct ones
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("something.doc", openQuickTestFile("quick.doc"))
        );
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("SOMETHING.DOC", openQuickTestFile("quick.doc"))
        );

        // Incorrect ones, Tika spots the mistake
        assertEquals(
                "application/msword",
                mimetypeService.guessMimetype("something.pdf", openQuickTestFile("quick.doc"))
        );

        // Ones where we use a different mimetype to the canonical one
        assertEquals(
                "image/bmp", // Officially image/x-ms-bmp
                mimetypeService.guessMimetype("image.bmp", openQuickTestFile("quick.bmp"))
        );

        // Ones where we know about the parent, and Tika knows about the details
        assertEquals(
                "application/dita+xml", // Full version: application/dita+xml;format=concept
                mimetypeService.guessMimetype("concept.dita", openQuickTestFile("quickConcept.dita"))
        );

        // Commented out when the test class was reintroduced after many years of not being run. Failed as the type was
        // identified as a zip. Reintroduced to check guessMimetype works without pdfbox libraries.
        //
        // // Alfresco Specific ones, that Tika doesn't know about
        // assertEquals(
        //         "application/acp",
        //         mimetypeService.guessMimetype("something.acp", openQuickTestFile("quick.acp"))
        // );

        // Where the file is corrupted
        File tmp = File.createTempFile("alfresco", ".tmp");
        ContentReader reader = openQuickTestFile("quick.doc");
        InputStream inp = reader.getContentInputStream();
        byte[] trunc = new byte[512+256];
        IOUtils.readFully(inp, trunc);
        inp.close();
        FileOutputStream out = new FileOutputStream(tmp);
        out.write(trunc);
        out.close();
        ContentReader truncReader = new FileContentReader(tmp);

        // Because the file is truncated, Tika won't be able to process the contents
        // of the OLE2 structure
        // So, it'll fall back to just OLE2, but it won't fail
        assertEquals(
                "application/x-tika-msoffice",
                mimetypeService.guessMimetype(null, truncReader)
        );
        // Commented out when the test class was reintroduced after many years of not being run. Failed to open a
        // stream onto the channel. Reintroduced to check guessMimetype works without pdfbox libraries.
        //
        // // But with the filename it'll be able to use the .doc extension
        // // to guess at it being a .Doc file
        // assertEquals(
        //         "application/msword",
        //         mimetypeService.guessMimetype("something.doc", truncReader)
        // );

        // Lotus notes EML files (ALF-16381 / TIKA-1042)
        assertEquals(
                "message/rfc822",
                mimetypeService.guessMimetype("something.eml", openQuickTestFile("quickLotus.eml"))
        );
    }

    private ContentReader openQuickTestFile(String filename)
    {
        URL url = getClass().getClassLoader().getResource("quick/" + filename);
        if(url == null)
        {
            fail("Quick test file \"" + filename + "\" wasn't found");
        }
        File file = new File(url.getFile());
        return new FileContentReader(file);
    }
}
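To round off the mimetype tests above, the short sketch below shows how the same service is typically used when detecting the type of a file on disk. It is a hedged illustration, not code from this change: the class name and the way the service is obtained are assumptions, while guessMimetype and the FileContentReader constructor are the calls exercised by the test.

import java.io.File;

import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.MimetypeService;

public class GuessMimetypeExample
{
    private MimetypeService mimetypeService; // injected, e.g. from the "mimetypeService" bean

    public String detect(File file, String fileName)
    {
        // Combine the file name hint with a sniff of the content, exactly as the test does;
        // the file name may be null, in which case only the content is used.
        ContentReader reader = new FileContentReader(file);
        return mimetypeService.guessMimetype(fileName, reader);
    }
}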
Some files were not shown because too many files have changed in this diff.