Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-09-24 14:32:01 +00:00)

Compare commits: 10.13...repo-5439v (12 commits)
Commits:
7c37144678
01977e06bd
6559994575
c054a8a2de
582c09563c
019ad6da47
a17bb830f2
d062140883
993f42c877
6ba5c3ac35
a9cf423885
047c47def4
@@ -1,4 +1,4 @@
# For SmartGit
[bugtraq "jira"]
url = https://alfresco.atlassian.net/browse/%BUGID%
url = https://issues.alfresco.com/jira/browse/%BUGID%
logRegex = ([A-Z]+-\\d+)
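The bugtraq section above tells SmartGit how to turn issue keys in commit messages into Jira links. As a minimal, hypothetical illustration (not part of this change; the commit message and issue key are made up), the logRegex pattern picks keys such as "MNT-12345" out of a commit message:

// Standalone sketch of the logRegex pattern above.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BugtraqRegexDemo
{
    public static void main(String[] args)
    {
        Pattern jiraKey = Pattern.compile("([A-Z]+-\\d+)");
        Matcher m = jiraKey.matcher("MNT-12345: downgrade test images");
        if (m.find())
        {
            // SmartGit would link this key to <bugtraq url>/MNT-12345
            System.out.println(m.group(1)); // prints MNT-12345
        }
    }
}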
.travis.yml (167 changed lines)
@@ -33,7 +33,7 @@ stages:
- name: test
if: commit_message !~ /\[skip tests\]/
- name: release
if: fork = false AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request AND commit_message !~ /\[no release\]/
if: fork = false AND (branch = master OR branch =~ /release\/.*/ OR branch =~/fix\/.*/) AND type != pull_request AND commit_message !~ /\[no release\]/
- name: update_downstream
if: fork = false AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request AND commit_message !~ /\[no downstream\]/
- name: trigger_downstream
@@ -45,48 +45,49 @@ install: travis_retry travis_wait 40 bash scripts/travis/build.sh
jobs:
include:

# - name: "Source Clear Scan"
# # only on release branches or master and if it is not a PR
# if: fork = false AND (branch = master OR branch =~ /release\/.*/) AND type != pull_request
# script: skip
# addons:
# srcclr: true

- name: "Core, Data-Model, Repository - AllUnitTestsSuite - Build and test"
if: commit_message !~ /\[skip repo\]/
script:
- travis_retry mvn -B test -pl core,data-model
- travis_retry mvn -B test -pl repository -Dtest=AllUnitTestsSuite

- name: "Repository - AppContext01TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - AppContext02TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - AppContext03TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - AppContext04TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - AppContext05TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- mkdir -p "${HOME}/tmp"
- cp repository/src/test/resources/realms/alfresco-realm.json "${HOME}/tmp"
- export HOST_IP=$(hostname -I | cut -f1 -d' ')
@@ -94,141 +95,131 @@ jobs:
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext05TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco "-Didentity-service.auth-server-url=http://${HOST_IP}:8999/auth"

- name: "Repository - AppContext06TestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext06TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - AppContextExtraTestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - MiscContextTestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
script: travis_wait 20 mvn -B test -pl repository -Dtest=MiscContextTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

- name: "Repository - SearchTestSuite"
if: commit_message !~ /\[skip repo\]/
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
script: travis_wait 20 mvn -B test -pl repository -Dtest=SearchTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco -Dindex.subsystem.name=solr6

- name: "Repository - MariaDB 10.2.18 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.2.18 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
|
||||
|
||||
- name: "Repository - MariaDB 10.4 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.4 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
|
||||
|
||||
- name: "Repository - MariaDB 10.5 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.5 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver
|
||||
|
||||
- name: "Repository - MySQL 5.7.28 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
- name: "Repository - MySQL 5.7.23 tests"
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:5.7.28 --transaction-isolation='READ-COMMITTED'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:5.7.23 --transaction-isolation='READ-COMMITTED'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Repository - MySQL 8 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
|
||||
before_script:
|
||||
- docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:8 --transaction-isolation='READ-COMMITTED'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
# One failing test to do with the schema reference files. ACS-1180
|
||||
# - name: "Repository - MySQL 8 tests"
|
||||
# if: commit_message !~ /\[skip db\]/
|
||||
# before_script:
|
||||
# - docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:8 --transaction-isolation='READ-COMMITTED'
|
||||
# - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
# script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Repository - PostgreSQL 10.9 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:10.9 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Repository - PostgreSQL 11.7 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Repository - PostgreSQL 12.4 tests"
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:12.4 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Repository - PostgreSQL 13.1 tests"
|
||||
# We only run DB tests on the latest version of PostgreSQL on feature branches
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
|
||||
if: commit_message !~ /\[skip db\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Remote-api - AppContext01TestSuite"
|
||||
if: commit_message !~ /\[skip repo\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Remote-api - AppContext02TestSuite"
|
||||
if: commit_message !~ /\[skip repo\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
|
||||
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Remote-api - AppContext03TestSuite"
|
||||
if: commit_message !~ /\[skip repo\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.6
|
||||
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Remote-api - AppContext04TestSuite"
|
||||
if: commit_message !~ /\[skip repo\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.4.0
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.3.5
|
||||
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "Remote-api - AppContextExtraTestSuite"
|
||||
if: commit_message !~ /\[skip repo\]/
|
||||
before_script:
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.1 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
|
||||
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:11.7 postgres -c 'max_connections=300'
|
||||
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.15.8
|
||||
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
|
||||
|
||||
- name: "REST API TAS tests part1"
|
||||
# TAS tests are generally skipped on feature branches as they will be repeated on the enterprise repo or community packaging builds
|
||||
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
|
||||
if: branch = master OR commit_message =~ /\[tas\]/
|
||||
before_script:
|
||||
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
|
||||
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
|
||||
@@ -236,7 +227,7 @@ jobs:
- travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part1 -Denvironment=default -DrunBugs=false

- name: "REST API TAS tests part2"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -244,7 +235,7 @@ jobs:
- travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part2 -Denvironment=default -DrunBugs=false

- name: "REST API TAS tests part3"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -252,7 +243,7 @@ jobs:
- travis_wait 60 mvn -B install -f packaging/tests/tas-restapi/pom.xml -Pall-tas-tests,run-restapi-part3 -Denvironment=default -DrunBugs=false

- name: "CMIS TAS tests - BROWSER binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -260,7 +251,7 @@ jobs:
- travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-browser -Denvironment=default -DrunBugs=false

- name: "CMIS TAS tests - ATOM binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -268,7 +259,7 @@ jobs:
- travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-atom -Denvironment=default -DrunBugs=false

- name: "CMIS TAS tests - WEBSERVICES binding"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal+transforms.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -276,7 +267,7 @@ jobs:
- travis_wait 40 mvn -B install -f packaging/tests/tas-cmis/pom.xml -Pall-tas-tests,run-cmis-webservices -Denvironment=default -DrunBugs=false

- name: "Email TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -284,7 +275,7 @@ jobs:
- travis_wait 30 mvn -B install -f packaging/tests/tas-email/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false

- name: "WebDAV TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -292,7 +283,7 @@ jobs:
- travis_wait 30 mvn -B install -f packaging/tests/tas-webdav/pom.xml -Pall-tas-tests -Denvironment=default -DrunBugs=false

- name: "Integration TAS tests"
if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip tas\]/) OR commit_message =~ /\[tas\]/
if: branch = master OR commit_message =~ /\[tas\]/
before_script:
- ${TAS_SCRIPTS}/start-compose.sh ${TAS_ENVIRONMENT}/docker-compose-minimal.yml
- ${TAS_SCRIPTS}/wait-for-alfresco-start.sh "http://localhost:8082/alfresco"
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>10.13</version>
<version>repo-5439v2-c2</version>
</parent>

<dependencies>
@@ -21,6 +21,7 @@ package org.alfresco.httpclient;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.AlgorithmParameters;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@@ -31,11 +32,14 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.alfresco.encryption.AlfrescoKeyStore;
import org.alfresco.encryption.AlfrescoKeyStoreImpl;
import org.alfresco.encryption.EncryptionUtils;
import org.alfresco.encryption.Encryptor;
import org.alfresco.encryption.KeyProvider;
import org.alfresco.encryption.KeyResourceLoader;
import org.alfresco.encryption.KeyStoreParameters;
import org.alfresco.encryption.ssl.AuthSSLProtocolSocketFactory;
import org.alfresco.encryption.ssl.SSLEncryptionParameters;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.util.Pair;
import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
@@ -49,6 +53,8 @@ import org.apache.commons.httpclient.SimpleHttpConnectionManager;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.params.DefaultHttpParams;
import org.apache.commons.httpclient.params.DefaultHttpParamsFactory;
import org.apache.commons.httpclient.params.HttpClientParams;
@@ -69,25 +75,23 @@ import org.apache.commons.logging.LogFactory;
*/
public class HttpClientFactory
{
/**
* Communication type for HttpClient:
* - NONE is plain http
* - SECRET is plain http with a shared secret via request header
* - HTTPS is mTLS with client authentication (certificates are required)
*/
public static enum SecureCommsType
{
HTTPS, NONE, SECRET;
HTTPS, NONE;

public static SecureCommsType getType(String type)
{
switch (type.toLowerCase())
if(type.equalsIgnoreCase("https"))
{
case "https": return HTTPS;
case "none": return NONE;
case "secret": return SECRET;
default: throw new IllegalArgumentException("Invalid communications type");

return HTTPS;
}
else if(type.equalsIgnoreCase("none"))
{
return NONE;
}
else
{
throw new IllegalArgumentException("Invalid communications type");
}
}
};
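For reference, the newer side of this hunk maps the "secret" string onto the added SECRET constant. A condensed, standalone copy of that switch-based variant, for illustration only (not the full Alfresco class):

public class SecureCommsTypeDemo
{
    enum SecureCommsType
    {
        HTTPS, NONE, SECRET;

        static SecureCommsType getType(String type)
        {
            // Same dispatch as the switch shown in the diff above.
            switch (type.toLowerCase())
            {
                case "https": return HTTPS;
                case "none": return NONE;
                case "secret": return SECRET;
                default: throw new IllegalArgumentException("Invalid communications type");
            }
        }
    }

    public static void main(String[] args)
    {
        System.out.println(SecureCommsType.getType("secret")); // SECRET
        System.out.println(SecureCommsType.getType("none"));   // NONE
    }
}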
@@ -118,24 +122,14 @@ public class HttpClientFactory

private int connectionTimeout = 0;

// Shared secret parameters
private String sharedSecret;
private String sharedSecretHeader = DEFAULT_SHAREDSECRET_HEADER;

// Default name for HTTP Request Header when using shared secret communication
public static final String DEFAULT_SHAREDSECRET_HEADER = "X-Alfresco-Search-Secret";

public HttpClientFactory()
{
}

/**
* Default constructor for legacy subsystems.
*/

public HttpClientFactory(SecureCommsType secureCommsType, SSLEncryptionParameters sslEncryptionParameters,
KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
MD5EncryptionParameters encryptionParameters, String host, int port, int sslPort,
int maxTotalConnections, int maxHostConnections, int socketTimeout)
KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
MD5EncryptionParameters encryptionParameters, String host, int port, int sslPort, int maxTotalConnections,
int maxHostConnections, int socketTimeout)
{
this.secureCommsType = secureCommsType;
this.sslEncryptionParameters = sslEncryptionParameters;
@@ -151,21 +145,6 @@ public class HttpClientFactory
init();
}

/**
* Recommended constructor for subsystems supporting Shared Secret communication.
* This constructor supports Shared Secret ("secret") communication method additionally to the legacy ones: "none" and "https".
*/
public HttpClientFactory(SecureCommsType secureCommsType, SSLEncryptionParameters sslEncryptionParameters,
KeyResourceLoader keyResourceLoader, KeyStoreParameters keyStoreParameters,
MD5EncryptionParameters encryptionParameters, String sharedSecret, String sharedSecretHeader,
String host, int port, int sslPort, int maxTotalConnections, int maxHostConnections, int socketTimeout)
{
this(secureCommsType, sslEncryptionParameters, keyResourceLoader, keyStoreParameters, encryptionParameters,
host, port, sslPort, maxTotalConnections, maxHostConnections, socketTimeout);
this.sharedSecret = sharedSecret;
this.sharedSecretHeader = sharedSecretHeader;
}

public void init()
{
this.sslKeyStore = new AlfrescoKeyStoreImpl(sslEncryptionParameters.getKeyStoreParameters(), keyResourceLoader);
@@ -293,44 +272,10 @@ public class HttpClientFactory
this.connectionTimeout = connectionTimeout;
}

/**
* Shared secret used for SECRET communication
* @param secret shared secret word
*/
public void setSharedSecret(String sharedSecret)
{
this.sharedSecret = sharedSecret;
}

/**
* @return Shared secret used for SECRET communication
*/
public String getSharedSecret()
{
return sharedSecret;
}

/**
* HTTP Request header used for SECRET communication
* @param sharedSecretHeader HTTP Request header
*/
public void setSharedSecretHeader(String sharedSecretHeader)
{
this.sharedSecretHeader = sharedSecretHeader;
}

/**
* @return HTTP Request header used for SECRET communication
*/
public String getSharedSecretHeader()
{
return sharedSecretHeader;
}

protected RequestHeadersHttpClient constructHttpClient()
protected HttpClient constructHttpClient()
{
MultiThreadedHttpConnectionManager connectionManager = new MultiThreadedHttpConnectionManager();
RequestHeadersHttpClient httpClient = new RequestHeadersHttpClient(connectionManager);
HttpClient httpClient = new HttpClient(connectionManager);
HttpClientParams params = httpClient.getParams();
params.setBooleanParameter(HttpConnectionParams.TCP_NODELAY, true);
params.setBooleanParameter(HttpConnectionParams.STALE_CONNECTION_CHECK, true);
@@ -346,15 +291,15 @@ public class HttpClientFactory
return httpClient;
}

protected RequestHeadersHttpClient getHttpsClient()
protected HttpClient getHttpsClient()
{
return getHttpsClient(host, sslPort);
}

protected RequestHeadersHttpClient getHttpsClient(String httpsHost, int httpsPort)
protected HttpClient getHttpsClient(String httpsHost, int httpsPort)
{
// Configure a custom SSL socket factory that will enforce mutual authentication
RequestHeadersHttpClient httpClient = constructHttpClient();
HttpClient httpClient = constructHttpClient();
// Default port is 443 for the HostFactory, when including customised port (like 8983) the port name is skipped from "getHostURL" string
HttpHostFactory hostFactory = new HttpHostFactory(new Protocol("https", sslSocketFactory, HttpsURL.DEFAULT_PORT));
httpClient.setHostConfiguration(new HostConfigurationWithHostFactory(hostFactory));
@@ -362,54 +307,28 @@ public class HttpClientFactory
return httpClient;
}

protected RequestHeadersHttpClient getDefaultHttpClient()
protected HttpClient getDefaultHttpClient()
{
return getDefaultHttpClient(host, port);
}

protected RequestHeadersHttpClient getDefaultHttpClient(String httpHost, int httpPort)
protected HttpClient getDefaultHttpClient(String httpHost, int httpPort)
{
RequestHeadersHttpClient httpClient = constructHttpClient();
HttpClient httpClient = constructHttpClient();
httpClient.getHostConfiguration().setHost(httpHost, httpPort);
return httpClient;
}

/**
* Build HTTP Client using default headers
* @return RequestHeadersHttpClient including default header for shared secret method
*/
protected RequestHeadersHttpClient constructSharedSecretHttpClient()
{
RequestHeadersHttpClient client = constructHttpClient();
client.setDefaultHeaders(Map.of(sharedSecretHeader, sharedSecret));
return client;
}

protected RequestHeadersHttpClient getSharedSecretHttpClient()
{
return getSharedSecretHttpClient(host, port);
}

protected RequestHeadersHttpClient getSharedSecretHttpClient(String httpHost, int httpPort)
{
RequestHeadersHttpClient httpClient = constructSharedSecretHttpClient();
httpClient.getHostConfiguration().setHost(httpHost, httpPort);
return httpClient;
}

protected AlfrescoHttpClient getAlfrescoHttpsClient()
{
return new HttpsClient(getHttpsClient());
AlfrescoHttpClient repoClient = new HttpsClient(getHttpsClient());
return repoClient;
}

protected AlfrescoHttpClient getAlfrescoHttpClient()
{
return new DefaultHttpClient(getDefaultHttpClient());
}

protected AlfrescoHttpClient getAlfrescoSharedSecretClient()
{
return new DefaultHttpClient(getSharedSecretHttpClient());
AlfrescoHttpClient repoClient = new DefaultHttpClient(getDefaultHttpClient());
return repoClient;
}

protected HttpClient getMD5HttpClient(String host, int port)
@@ -422,37 +341,66 @@ public class HttpClientFactory

public AlfrescoHttpClient getRepoClient(String host, int port)
{
switch (secureCommsType)
AlfrescoHttpClient repoClient = null;

if(secureCommsType == SecureCommsType.HTTPS)
{
case HTTPS: return getAlfrescoHttpsClient();
case NONE: return getAlfrescoHttpClient();
case SECRET: return getAlfrescoSharedSecretClient();
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
repoClient = getAlfrescoHttpsClient();
}
}

public RequestHeadersHttpClient getHttpClient()
{
switch (secureCommsType)
else if(secureCommsType == SecureCommsType.NONE)
{
case HTTPS: return getHttpsClient();
case NONE: return getDefaultHttpClient();
case SECRET: return getSharedSecretHttpClient();
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
repoClient = getAlfrescoHttpClient();
}
else
{
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
}

return repoClient;
}

public RequestHeadersHttpClient getHttpClient(String host, int port)
public HttpClient getHttpClient()
{
switch (secureCommsType)
HttpClient httpClient = null;

if(secureCommsType == SecureCommsType.HTTPS)
{
case HTTPS: return getHttpsClient(host, port);
case NONE: return getDefaultHttpClient(host, port);
case SECRET: return getSharedSecretHttpClient(host, port);
default: throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in [solr|alfresco].secureComms, should be 'ssl', 'none' or 'secret'");
httpClient = getHttpsClient();
}
else if(secureCommsType == SecureCommsType.NONE)
{
httpClient = getDefaultHttpClient();
}
else
{
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
}

return httpClient;
}

public HttpClient getHttpClient(String host, int port)
{
HttpClient httpClient = null;

if(secureCommsType == SecureCommsType.HTTPS)
{
httpClient = getHttpsClient(host, port);
}
else if(secureCommsType == SecureCommsType.NONE)
{
httpClient = getDefaultHttpClient(host, port);
}
else
{
throw new AlfrescoRuntimeException("Invalid Solr secure communications type configured in alfresco.secureComms, should be 'ssl'or 'none'");
}

return httpClient;
}


/**
* A secure client connection to the repository.
*
@@ -1,87 +0,0 @@
/*
* Copyright (C) 2005-2021 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.httpclient;

import java.io.IOException;
import java.util.Map;

import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;

/**
* Since Apache HttpClient 3.1 doesn't support including custom headers by default,
* this class is adding that custom headers every time a method is invoked.
*/
public class RequestHeadersHttpClient extends HttpClient
{

private Map<String, String> defaultHeaders;

public RequestHeadersHttpClient(MultiThreadedHttpConnectionManager connectionManager)
{
super(connectionManager);
}

public Map<String, String> getDefaultHeaders()
{
return defaultHeaders;
}

public void setDefaultHeaders(Map<String, String> defaultHeaders)
{
this.defaultHeaders = defaultHeaders;
}

private void addDefaultHeaders(HttpMethod method)
{
if (defaultHeaders != null)
{
defaultHeaders.forEach((k,v) -> {
method.addRequestHeader(k, v);
});
}
}

@Override
public int executeMethod(HttpMethod method) throws IOException, HttpException
{
addDefaultHeaders(method);
return super.executeMethod(method);
}

@Override
public int executeMethod(HostConfiguration hostConfiguration, HttpMethod method) throws IOException, HttpException
{
addDefaultHeaders(method);
return super.executeMethod(hostConfiguration, method);
}

@Override
public int executeMethod(HostConfiguration hostconfig, HttpMethod method, HttpState state)
throws IOException, HttpException
{
addDefaultHeaders(method);
return super.executeMethod(hostconfig, method, state);
}

}
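The deleted class above works by copying a map of default headers onto every HttpMethod before delegating to the stock commons-httpclient 3.1 executeMethod(). A minimal usage sketch, assuming commons-httpclient 3.1 on the classpath and a made-up Solr URL and secret value:

import java.util.Map;
import org.alfresco.httpclient.RequestHeadersHttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.methods.GetMethod;

public class SharedSecretHeaderDemo
{
    public static void main(String[] args) throws Exception
    {
        RequestHeadersHttpClient client =
                new RequestHeadersHttpClient(new MultiThreadedHttpConnectionManager());
        // Same header name as DEFAULT_SHAREDSECRET_HEADER in HttpClientFactory; secret is a placeholder.
        client.setDefaultHeaders(Map.of("X-Alfresco-Search-Secret", "my-shared-secret"));

        GetMethod get = new GetMethod("http://localhost:8983/solr/admin/cores");
        // executeMethod() adds the default headers to the request before sending it.
        int status = client.executeMethod(get);
        System.out.println(status == HttpStatus.SC_OK ? "authenticated" : "status " + status);
    }
}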
@@ -24,10 +24,8 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicLong;

import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -350,17 +348,6 @@ public class TempFileProvider
{
public static final String KEY_PROTECT_HOURS = "protectHours";
public static final String KEY_DIRECTORY_NAME = "directoryName";
public static final String KEY_MAX_FILES_TO_DELETE = "maxFilesToDelete";
public static final String KEY_MAX_TIME_TO_RUN = "maxTimeToRun";

/** The time when the job has actually started */
private static long jobStartTime;

/** The maximum number of files that can be deleted when the cleaning jobs runs */
private static AtomicLong maxFilesToDelete;

/** The maximum time allowed for the cleaning job to run */
private static Duration maxTimeToRun;

/**
* Gets a list of all files in the {@link TempFileProvider#ALFRESCO_TEMP_FILE_DIR temp directory}
@@ -389,59 +376,24 @@ public class TempFileProvider
}

String directoryName = (String) context.getJobDetail().getJobDataMap().get(KEY_DIRECTORY_NAME);

try
{
final Object oMaxFilesToDelete = context.getJobDetail().getJobDataMap().get(KEY_MAX_FILES_TO_DELETE);
if (oMaxFilesToDelete != null)
{
final String strMaxFilesToDelete = (String) oMaxFilesToDelete;
maxFilesToDelete = new AtomicLong(Long.parseLong(strMaxFilesToDelete));
logger.debug("Set the maximum number of temp files to be deleted to: " + maxFilesToDelete.get());
}
else
{
logger.debug("No maximum number of files was configured for the temp file clean job.");
}
}
catch (Exception e)
{
logger.warn(e);
throw new JobExecutionException("Invalid job data, maxFilesToDelete");
}

try
{
final Object oMaxTimeToRun = context.getJobDetail().getJobDataMap().get(KEY_MAX_TIME_TO_RUN);
if (oMaxTimeToRun != null)
{
final String strMaxTimeToRun = (String) oMaxTimeToRun;
maxTimeToRun = Duration.parse(strMaxTimeToRun);
logger.debug("Set the maximum duration time of the temp file clean job to: " + maxTimeToRun);
}
else
{
logger.debug("No maximum duration was configured for the temp file clean job.");
}
}
catch (Exception e)
{
logger.warn(e);
throw new JobExecutionException("Invalid job data, maxTimeToRun");
}

if (directoryName == null)
{
directoryName = ALFRESCO_TEMP_FILE_DIR;
}

jobStartTime = System.currentTimeMillis();
long aFewHoursBack = jobStartTime - (3600L * 1000L * protectHours);
long aLongTimeBack = jobStartTime - (24 * 3600L * 1000L);
long now = System.currentTimeMillis();
long aFewHoursBack = now - (3600L * 1000L * protectHours);

long aLongTimeBack = now - (24 * 3600L * 1000L);

File tempDir = TempFileProvider.getTempDir(directoryName);
int count = removeFiles(tempDir, aFewHoursBack, aLongTimeBack, false); // don't delete this directory
logger.debug("Removed " + count + " files from temp directory: " + tempDir);
// done
if (logger.isDebugEnabled())
{
logger.debug("Removed " + count + " files from temp directory: " + tempDir);
}
}

/**
@@ -477,23 +429,29 @@ public class TempFileProvider
}
// list all files
File[] files = directory.listFiles();
File[] filesToIterate = files != null ? files : new File[0];
int count = 0;
for (File file : filesToIterate)
for (File file : files)
{
if (shouldTheDeletionStop())
{
break;
}
if (file.isDirectory())
{
// long life for this folder and its children
// OR
// enter subdirectory and clean it out and remove itsynetics
int countRemoved = removeFiles(file,
isLongLifeTempDir(file) ? longLifeBefore : removeBefore, longLifeBefore,
true);
logger.debug("Removed " + countRemoved + " files from " + (isLongLifeTempDir(file) ? "temp " : " ") + "directory: " + file);
if(isLongLifeTempDir(file))
{
// long life for this folder and its children
int countRemoved = removeFiles(file, longLifeBefore, longLifeBefore, true);
if (logger.isDebugEnabled())
{
logger.debug("Removed " + countRemoved + " files from temp directory: " + file);
}
}
else
{
// enter subdirectory and clean it out and remove itsynetics
int countRemoved = removeFiles(file, removeBefore, longLifeBefore, true);
if (logger.isDebugEnabled())
{
logger.debug("Removed " + countRemoved + " files from directory: " + file);
}
}
}
else
{
@@ -506,19 +464,11 @@ public class TempFileProvider
// it is a file - attempt a delete
try
{
logger.debug("Deleting temp file: " + file);
if(logger.isDebugEnabled())
{
logger.debug("Deleting temp file: " + file);
}
file.delete();

if (maxFilesToDelete != null)
{
maxFilesToDelete.decrementAndGet();
logger.debug(maxFilesToDelete.get() + " files left to delete.");
}
if (maxTimeToRun != null)
{
logger.debug((jobStartTime + maxTimeToRun.toMillis() - System.currentTimeMillis()) + " millis left to delete.");
}

count++;
}
catch (Throwable e)
@@ -537,8 +487,10 @@ public class TempFileProvider
if(listing != null && listing.length == 0)
{
// directory is empty
logger.debug("Deleting empty directory: " + directory);
// ignore the limits for empty directories that just need cleanup
if(logger.isDebugEnabled())
{
logger.debug("Deleting empty directory: " + directory);
}
directory.delete();
}
}
@@ -547,21 +499,8 @@ public class TempFileProvider
logger.info("Failed to remove temp directory: " + directory, e);
}
}
// done
return count;
}

/**
* Decides whether or not the job should continue iterating through the temp files and delete.
* It achieves the result by checking the number of files deleted against the limit and whether
* or not it is within the time limit
*
* @return true or false
*/
private static boolean shouldTheDeletionStop()
{
return maxFilesToDelete != null && maxFilesToDelete.get() <= 0
|| maxTimeToRun != null && ((jobStartTime + maxTimeToRun.toMillis()) < System
.currentTimeMillis());
}
}
}
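The removed shouldTheDeletionStop() logic above budgets the clean-up job by a file count (maxFilesToDelete) and a wall-clock limit (maxTimeToRun, parsed as an ISO-8601 duration). A minimal standalone sketch of that stop condition, with made-up job-data values:

import java.time.Duration;
import java.util.concurrent.atomic.AtomicLong;

public class CleanupBudgetDemo
{
    public static void main(String[] args)
    {
        // Values a Quartz JobDataMap might supply for maxFilesToDelete / maxTimeToRun (hypothetical).
        AtomicLong maxFilesToDelete = new AtomicLong(Long.parseLong("1000"));
        Duration maxTimeToRun = Duration.parse("PT30M"); // 30 minutes
        long jobStartTime = System.currentTimeMillis();

        // Stop when the file budget is exhausted or the allowed run time has elapsed.
        boolean stop = (maxFilesToDelete.get() <= 0)
                || (jobStartTime + maxTimeToRun.toMillis() < System.currentTimeMillis());
        System.out.println("stop deleting? " + stop);
    }
}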
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>10.13</version>
<version>repo-5439v2-c2</version>
</parent>

<properties>
@@ -167,7 +167,7 @@
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
<version>6.2.6</version>
<version>6.2.4</version>
</dependency>

<!-- the cxf libs were updated, see dependencyManagement section -->
@@ -283,31 +283,6 @@
<groupId>com.sun.activation</groupId>
<artifactId>javax.activation</artifactId>
</exclusion>
<!-- No longer needed -->
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox-tools</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>preflight</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>jempbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>xmpbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.pdfbox</groupId>
<artifactId>jbig2-imageio</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
|
@@ -60,15 +60,12 @@ public abstract class ConfigScheduler<Data>
// Synchronized has little effect in normal operation, but on laptops that are suspended, there can be a number
// of Threads calling execute concurrently without it, resulting in errors in the log. Theoretically possible in
// production but not very likely.
public void execute(JobExecutionContext context) throws JobExecutionException
public synchronized void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap dataMap = context.getJobDetail().getJobDataMap();
ConfigScheduler configScheduler = (ConfigScheduler)dataMap.get(CONFIG_SCHEDULER);
synchronized (configScheduler)
{
boolean successReadingConfig = configScheduler.readConfigAndReplace(true);
configScheduler.changeScheduleOnStateChange(successReadingConfig);
}
boolean successReadingConfig = configScheduler.readConfigAndReplace(true);
configScheduler.changeScheduleOnStateChange(successReadingConfig);
}
}

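A rough sketch of the scheduling pattern in the hunk above, assuming the Quartz 2.x Job API. SharedConfigState and readAndReschedule are simplified stand-ins for the real ConfigScheduler methods; the point is only that the config re-read is serialised when firings overlap (for example after a suspended laptop resumes):

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

// The Quartz job looks up a shared scheduler object from the JobDataMap and delegates to it.
public class ConfigSchedulerJobSketch implements Job
{
    public static final String CONFIG_SCHEDULER = "configScheduler";

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException
    {
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        SharedConfigState configScheduler = (SharedConfigState) dataMap.get(CONFIG_SCHEDULER);
        configScheduler.readAndReschedule();
    }

    // Stand-in for the real ConfigScheduler; synchronizing here is equivalent to the
    // synchronized block / synchronized method shown in the diff, because the same
    // instance is shared between firings via the JobDataMap.
    static class SharedConfigState
    {
        synchronized void readAndReschedule()
        {
            boolean successReadingConfig = readConfigAndReplace(true);
            changeScheduleOnStateChange(successReadingConfig);
        }

        boolean readConfigAndReplace(boolean firstTime) { return true; }

        void changeScheduleOnStateChange(boolean success) { /* adjust the trigger here */ }
    }
}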
@@ -9,6 +9,6 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
</project>
|
||||
|
@@ -1,54 +0,0 @@
|
||||
/*--
|
||||
|
||||
Copyright (C) 2000-2012 Jason Hunter & Brett McLaughlin.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions, and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions, and the disclaimer that follows
|
||||
these conditions in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
3. The name "JDOM" must not be used to endorse or promote products
|
||||
derived from this software without prior written permission. For
|
||||
written permission, please contact <request_AT_jdom_DOT_org>.
|
||||
|
||||
4. Products derived from this software may not be called "JDOM", nor
|
||||
may "JDOM" appear in their name, without prior written permission
|
||||
from the JDOM Project Management <request_AT_jdom_DOT_org>.
|
||||
|
||||
In addition, we request (but do not require) that you include in the
|
||||
end-user documentation provided with the redistribution and/or in the
|
||||
software itself an acknowledgement equivalent to the following:
|
||||
"This product includes software developed by the
|
||||
JDOM Project (http://www.jdom.org/)."
|
||||
Alternatively, the acknowledgment may be graphical using the logos
|
||||
available at http://www.jdom.org/images/logos.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE JDOM AUTHORS OR THE PROJECT
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
||||
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
|
||||
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
This software consists of voluntary contributions made by many
|
||||
individuals on behalf of the JDOM Project and was originally
|
||||
created by Jason Hunter <jhunter_AT_jdom_DOT_org> and
|
||||
Brett McLaughlin <brett_AT_jdom_DOT_org>. For more information
|
||||
on the JDOM Project, please see <http://www.jdom.org/>.
|
||||
|
||||
*/
|
||||
|
@@ -1,46 +0,0 @@
|
||||
Indiana University Extreme! Lab Software License
|
||||
|
||||
Version 1.1.1
|
||||
|
||||
Copyright (c) 2002 Extreme! Lab, Indiana University. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in
|
||||
the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. The end-user documentation included with the redistribution, if any,
|
||||
must include the following acknowledgment:
|
||||
|
||||
"This product includes software developed by the Indiana University
|
||||
Extreme! Lab (http://www.extreme.indiana.edu/)."
|
||||
|
||||
Alternately, this acknowledgment may appear in the software itself,
|
||||
if and wherever such third-party acknowledgments normally appear.
|
||||
|
||||
4. The names "Indiana Univeristy" and "Indiana Univeristy Extreme! Lab"
|
||||
must not be used to endorse or promote products derived from this
|
||||
software without prior written permission. For written permission,
|
||||
please contact http://www.extreme.indiana.edu/.
|
||||
|
||||
5. Products derived from this software may not use "Indiana Univeristy"
|
||||
name nor may "Indiana Univeristy" appear in their name, without prior
|
||||
written permission of the Indiana University.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESSED OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHORS, COPYRIGHT HOLDERS OR ITS CONTRIBUTORS
|
||||
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@@ -1,6 +1,6 @@
# Fetch image based on Tomcat 9.0, Java 11 and Centos 8
# More info about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
FROM alfresco/alfresco-base-tomcat:9.0.52-java-11-centos-7
FROM alfresco/alfresco-base-tomcat:9.0.41-java-11-openjdk-centos-8

# Set default docker_context.
ARG resource_path=target
@@ -65,12 +65,12 @@ RUN sed -i -e "s_log4j.appender.File.File\=alfresco.log_log4j.appender.File.File

# fontconfig is required by the Activiti workflow diagram generator
# installing pinned dependencies as well
RUN yum install -y fontconfig-2.13.0-4.3.el7 \
dejavu-fonts-common-2.33-6.el7 \
fontpackages-filesystem-1.44-8.el7 \
freetype-2.8-14.el7_9.1 \
libpng-1.5.13-8.el7 \
dejavu-sans-fonts-2.33-6.el7 && \
RUN yum install -y fontconfig-2.13.1-3.el8 \
dejavu-fonts-common-2.35-6.el8 \
fontpackages-filesystem-1.44-22.el8 \
freetype-2.9.1-4.el8_3.1 \
libpng-1.6.34-5.el8 \
dejavu-sans-fonts-2.35-6.el8 && \
yum clean all

# The standard configuration is to have all Tomcat files owned by root with group GROUPNAME and whilst owner has read/write privileges,

@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<profiles>
|
||||
|
@@ -1,4 +1,4 @@
TRANSFORMERS_TAG=2.5.0
SOLR6_TAG=2.0.2
POSTGRES_TAG=13.1
ACTIVEMQ_TAG=5.16.1
TRANSFORMERS_TAG=2.3.6
SOLR6_TAG=2.0.1
POSTGRES_TAG=11.7
ACTIVEMQ_TAG=5.15.8

@@ -38,7 +38,7 @@ services:
-Dftp.dataPortTo=30099
-Dshare.host=localhost
-Daos.baseUrlOverwrite=http://localhost:8082/alfresco/aos
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
-Dmessaging.broker.url=\"failover:(tcp://activemq:61616)?timeout=3000&jms.useCompression=true\"
-DlocalTransform.core-aio.url=http://transform-core-aio:8090/
-Dimap.server.port=1143
-Dftp.port=1221

@@ -38,7 +38,7 @@ services:
-Dftp.dataPortTo=30099
-Dshare.host=localhost
-Daos.baseUrlOverwrite=http://localhost:8082/alfresco/aos
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
-Dmessaging.broker.url=\"failover:(tcp://activemq:61616)?timeout=3000&jms.useCompression=true\"
-Dlocal.transform.service.enabled=false
-Dlegacy.transform.service.enabled=false
-Dimap.server.port=1143

@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -1,54 +0,0 @@
|
||||
package org.alfresco.rest.models.aspects;
|
||||
|
||||
import org.alfresco.rest.RestTest;
|
||||
import org.alfresco.rest.model.RestAspectModel;
|
||||
import org.alfresco.rest.model.RestErrorModel;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class GetAspectTests extends RestTest
|
||||
{
|
||||
|
||||
@BeforeClass(alwaysRun=true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
restClient.authenticateUser(dataUser.createRandomTestUser());
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify inexistent aspect and status code is Not Found (404)")
|
||||
public void getInexistentAspect() throws Exception
|
||||
{
|
||||
String unknownAspect = "unknown:aspect";
|
||||
restClient.withModelAPI().getAspect(unknownAspect);
|
||||
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND)
|
||||
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_WAS_NOT_FOUND, unknownAspect));
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify Aspect Info and status code is OK (200)")
|
||||
public void getAspect() throws Exception
|
||||
{
|
||||
RestAspectModel aspect = restClient.withModelAPI().getAspect("cm:titled");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspect.assertThat().field("associations").isEmpty().and()
|
||||
.field("mandatoryAspects").isEmpty().and()
|
||||
.field("properties").isNotEmpty().and()
|
||||
.field("includedInSupertypeQuery").is(true).and()
|
||||
.field("isContainer").is(false).and()
|
||||
.field("id").is("cm:titled").and()
|
||||
.field("description").is("Titled").and()
|
||||
.field("title").is("Titled").and()
|
||||
.field("model.id").is("cm:contentmodel").and()
|
||||
.field("model.author").is("Alfresco").and()
|
||||
.field("model.description").is("Alfresco Content Domain Model").and()
|
||||
.field("model.namespaceUri").is("http://www.alfresco.org/model/content/1.0").and()
|
||||
.field("model.namespacePrefix").is("cm");
|
||||
}
|
||||
}
|
@@ -1,199 +0,0 @@
|
||||
package org.alfresco.rest.models.aspects;
|
||||
|
||||
import org.alfresco.rest.RestTest;
|
||||
import org.alfresco.rest.model.RestAbstractClassModel;
|
||||
import org.alfresco.rest.model.RestAspectsCollection;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class GetAspectsTests extends RestTest
|
||||
{
|
||||
|
||||
private UserModel regularUser;
|
||||
|
||||
@BeforeClass(alwaysRun=true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
regularUser = dataUser.createRandomTestUser();
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify user get aspects and gets status code OK (200)")
|
||||
public void getAspects() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.assertThat()
|
||||
.entriesListCountIs(100)
|
||||
.and().entriesListContains("id", "cm:classifiable")
|
||||
.and().entriesListContains("id", "cm:author")
|
||||
.and().entriesListContains("id", "cm:checkedOut");
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter aspects using namespace uri and gets status code OK (200)")
|
||||
public void getAspectByNamespaceUri() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(namespaceUri matches('http://www.alfresco.org/model.*'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.assertThat().entriesListCountIs(100);
|
||||
|
||||
aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(not namespaceUri matches('http://www.alfresco.org/model.*'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.assertThat().entriesListCountIs(0);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter aspects using modelId and gets status code OK (200)")
|
||||
public void getAspectByModelsIds() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel', 'smf:smartFolder'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").isLessThan(65).and()
|
||||
.field("maxItems").is(100).and()
|
||||
.field("skipCount").isGreaterThan(0).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter aspects using modelId with subaspects and gets status code OK (200)")
|
||||
public void getAspectByModelsIdsWithIncludeSubAspects() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBASPECTS', 'smf:smartFolder INCLUDESUBASPECTS'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").isGreaterThan(65).and()
|
||||
.field("maxItems").is(100).and()
|
||||
.field("skipCount").isGreaterThan(0).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter aspects using parentId and gets status code OK (200)")
|
||||
public void getAspectByParentId() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:titled'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
aspects.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").is(5).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should Aspects association, properties and mandatory aspects and gets status code OK (200)")
|
||||
public void getAspectIncludeParams() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("include=properties,mandatoryAspects,associations")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
for (RestAbstractClassModel aspect : aspects.getEntries())
|
||||
{
|
||||
aspect.onModel().assertThat()
|
||||
.field("associations").isNotNull().and()
|
||||
.field("properties").isNotNull().and()
|
||||
.field("mandatoryAspects").isNotNull();
|
||||
}
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should verify the query errors with possible options")
|
||||
public void verifyAspectsQueryError()
|
||||
{
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in (' ')")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBASPECTS',))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBTYPES'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in (' ')")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:content',))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:content',))&include=properties")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(namespaceUri matches('*'))")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:content'))&include=properties")
|
||||
.getAspects();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section={TestGroup.REST_API, TestGroup.MODEL}, executionType= ExecutionType.REGRESSION,
|
||||
description= "Verify if any user gets aspects with high skipCount and maxItems parameter applied")
|
||||
public void getPaginationParameter() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser)
|
||||
.withModelAPI()
|
||||
.usingParams("maxItems=10&skipCount=10")
|
||||
.getAspects();
|
||||
aspects.assertThat().entriesListCountIs(10);
|
||||
aspects.assertThat().paginationField("hasMoreItems").is("true");
|
||||
aspects.assertThat().paginationField("skipCount").is("10");
|
||||
aspects.assertThat().paginationField("maxItems").is("10");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section={TestGroup.REST_API, TestGroup.MODEL}, executionType= ExecutionType.REGRESSION,
|
||||
description= "Verify if any user gets aspects with hasMoreItems applied bases on skip count and maxItems")
|
||||
public void getHighPaginationQuery() throws Exception
|
||||
{
|
||||
RestAspectsCollection aspects = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("maxItems=10&skipCount=150")
|
||||
.getAspects();
|
||||
aspects.assertThat().entriesListCountIs(0);
|
||||
aspects.assertThat().paginationField("hasMoreItems").is("false");
|
||||
aspects.assertThat().paginationField("skipCount").is("150");
|
||||
aspects.assertThat().paginationField("maxItems").is("10");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
}
|
@@ -1,55 +0,0 @@
|
||||
package org.alfresco.rest.models.types;
|
||||
|
||||
import org.alfresco.rest.RestTest;
|
||||
import org.alfresco.rest.model.RestErrorModel;
|
||||
import org.alfresco.rest.model.RestTypeModel;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class GetTypeTests extends RestTest
|
||||
{
|
||||
|
||||
@BeforeClass(alwaysRun=true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
restClient.authenticateUser(dataUser.createRandomTestUser());
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify inexistent type and status code is Not Found (404)")
|
||||
public void getInexistentType() throws Exception
|
||||
{
|
||||
String unknownType = "unknown:type";
|
||||
restClient.withModelAPI().getType(unknownType);
|
||||
restClient.assertStatusCodeIs(HttpStatus.NOT_FOUND)
|
||||
.assertLastError().containsSummary(String.format(RestErrorModel.ENTITY_WAS_NOT_FOUND, unknownType));
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify Type Info and status code is OK (200)")
|
||||
public void getType() throws Exception
|
||||
{
|
||||
RestTypeModel type = restClient.withModelAPI().getType("cm:content");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
type.assertThat().field("associations").isEmpty().and()
|
||||
.field("mandatoryAspects").isNotEmpty().and()
|
||||
.field("properties").isNotEmpty().and()
|
||||
.field("includedInSupertypeQuery").is(true).and()
|
||||
.field("isArchive").is(true).and()
|
||||
.field("isContainer").is(false).and()
|
||||
.field("id").is("cm:content").and()
|
||||
.field("description").is("Base Content Object").and()
|
||||
.field("title").is("Content").and()
|
||||
.field("model.id").is("cm:contentmodel").and()
|
||||
.field("model.author").is("Alfresco").and()
|
||||
.field("model.description").is("Alfresco Content Domain Model").and()
|
||||
.field("model.namespaceUri").is("http://www.alfresco.org/model/content/1.0").and()
|
||||
.field("model.namespacePrefix").is("cm");
|
||||
}
|
||||
}
|
@@ -1,199 +0,0 @@
|
||||
package org.alfresco.rest.models.types;
|
||||
|
||||
import org.alfresco.rest.RestTest;
|
||||
import org.alfresco.rest.model.RestAbstractClassModel;
|
||||
import org.alfresco.rest.model.RestTypesCollection;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class GetTypesTests extends RestTest
|
||||
{
|
||||
|
||||
private UserModel regularUser;
|
||||
|
||||
@BeforeClass(alwaysRun=true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
regularUser = dataUser.createRandomTestUser();
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Verify user get types and gets status code OK (200)")
|
||||
public void getTypes() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.assertThat()
|
||||
.entriesListCountIs(100)
|
||||
.and().entriesListContains("id", "cm:content")
|
||||
.and().entriesListContains("id", "cm:systemfolder")
|
||||
.and().entriesListContains("id", "cm:folder");
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter types using namespace uri and gets status code OK (200)")
|
||||
public void getTypeByNamespaceUri() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(namespaceUri matches('http://www.alfresco.org/model.*'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.assertThat().entriesListCountIs(100);
|
||||
|
||||
types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(not namespaceUri matches('http://www.alfresco.org/model.*'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.assertThat().entriesListCountIs(0);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter types using modelId and gets status code OK (200)")
|
||||
public void getTypeByModelsIds() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel', 'smf:smartFolder'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").isLessThan(65).and()
|
||||
.field("maxItems").is(100).and()
|
||||
.field("skipCount").isGreaterThan(0).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter types using modelId with subtypes and gets status code OK (200)")
|
||||
public void getTypeByModelsIdsWithIncludeSubTypes() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBTYPES', 'smf:smartFolder INCLUDESUBTYPES'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").isGreaterThan(65).and()
|
||||
.field("maxItems").is(100).and()
|
||||
.field("skipCount").isGreaterThan(0).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should filter types using parentId and gets status code OK (200)")
|
||||
public void getTypeByParentId() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:content'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
types.getPagination().assertThat().fieldsCount().is(5).and()
|
||||
.field("totalItems").isGreaterThan(40).and()
|
||||
.field("hasMoreItems").is(false);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should get Type with association, properties and mandatory types and gets status code OK (200)")
|
||||
public void getTypeIncludeParams() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("include=properties,mandatoryAspects,associations")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
for (RestAbstractClassModel type : types.getEntries())
|
||||
{
|
||||
type.onModel().assertThat()
|
||||
.field("associations").isNotNull().and()
|
||||
.field("properties").isNotNull().and()
|
||||
.field("mandatoryAspects").isNotNull();
|
||||
}
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.MODEL }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Should verify the query errors with possible options")
|
||||
public void verifyTypesQueryError() throws Exception
|
||||
{
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in (' ')")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBTYPES',))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(modelId in ('cm:contentmodel INCLUDESUBASPECTS'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in (' ')")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:titled',))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:titled',))&include=properties")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(namespaceUri matches('*'))")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
|
||||
restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("where=(parentId in ('cm:titled'))&include=properties")
|
||||
.getTypes();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section={TestGroup.REST_API, TestGroup.MODEL}, executionType= ExecutionType.REGRESSION,
|
||||
description= "Verify if any user gets types with high skipCount and maxItems parameter applied")
|
||||
public void getPaginationParameter() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser)
|
||||
.withModelAPI()
|
||||
.usingParams("maxItems=10&skipCount=10")
|
||||
.getTypes();
|
||||
types.assertThat().entriesListCountIs(10);
|
||||
types.assertThat().paginationField("hasMoreItems").is("true");
|
||||
types.assertThat().paginationField("skipCount").is("10");
|
||||
types.assertThat().paginationField("maxItems").is("10");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.MODEL, TestGroup.REGRESSION })
|
||||
@TestRail(section={TestGroup.REST_API, TestGroup.MODEL}, executionType= ExecutionType.REGRESSION,
|
||||
description= "Verify if any user gets types with hasMoreItems applied bases on skip count and maxItems")
|
||||
public void getHighPaginationQuery() throws Exception
|
||||
{
|
||||
RestTypesCollection types = restClient.authenticateUser(regularUser).withModelAPI()
|
||||
.usingParams("maxItems=10&skipCount=150")
|
||||
.getTypes();
|
||||
types.assertThat().entriesListCountIs(0);
|
||||
types.assertThat().paginationField("hasMoreItems").is("false");
|
||||
types.assertThat().paginationField("skipCount").is("150");
|
||||
types.assertThat().paginationField("maxItems").is("10");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
}
|
@@ -15,7 +15,6 @@
|
||||
<package name="org.alfresco.rest.tags.*"/>
|
||||
<package name="org.alfresco.rest.trashcan.*"/>
|
||||
<package name="org.alfresco.rest.workflow.*"/>
|
||||
<package name="org.alfresco.rest.models.*"/>
|
||||
</packages>
|
||||
</test>
|
||||
</suite>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -7,12 +7,12 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
<scm-path>${project.parent.parent.scm.url}</scm-path>
|
||||
<scm-revision>${buildNumber}</scm-revision>
|
||||
<scm-revision>${build-number}</scm-revision>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
@@ -140,23 +140,6 @@
|
||||
</resource>
|
||||
</resources>
|
||||
<plugins>
|
||||
<!-- Gets the scm revision and stores it in the ${buildNumber} variable -->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>buildnumber-maven-plugin</artifactId>
|
||||
<version>1.4</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>validate</phase>
|
||||
<goals>
|
||||
<goal>create</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<shortRevisionLength>8</shortRevisionLength>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<executions>
|
||||
|
@@ -66,8 +66,6 @@
|
||||
|
||||
<bean id="SOLRAuthenticationFilter" class="org.alfresco.repo.web.scripts.solr.SOLRAuthenticationFilter">
|
||||
<property name="secureComms" value="${solr.secureComms}"/>
|
||||
<property name="sharedSecret" value="${solr.sharedSecret}"/>
|
||||
<property name="sharedSecretHeader" value="${solr.sharedSecret.header}"/>
|
||||
</bean>
|
||||
|
||||
<bean id="WebscriptAuthenticationFilter" class="org.alfresco.repo.management.subsystems.ChainingSubsystemProxyFactory">
|
||||
|
@@ -184,15 +184,5 @@
|
||||
</filter>
|
||||
|
||||
</config>
|
||||
<!--
|
||||
A set of HTTP response headers that instructs the browser to behave in certain ways to improve security
|
||||
-->
|
||||
<config evaluator="string-compare" condition="SecurityHeadersPolicy">
|
||||
<headers>
|
||||
<header>
|
||||
<name>X-Frame-Options</name>
|
||||
<value>SAMEORIGIN</value>
|
||||
</header>
|
||||
</headers>
|
||||
</config>
|
||||
|
||||
</alfresco-config>
|
@@ -104,12 +104,6 @@
|
||||
<filter-class>org.springframework.extensions.webscripts.servlet.CSRFFilter</filter-class>
|
||||
</filter>
|
||||
|
||||
<filter>
|
||||
<description>Security Headers filter. Adds security response headers based on config.</description>
|
||||
<filter-name>Security Headers Filter</filter-name>
|
||||
<filter-class>org.springframework.extensions.webscripts.servlet.SecurityHeadersFilter</filter-class>
|
||||
</filter>
|
||||
|
||||
<!-- Enterprise filter placeholder -->
|
||||
<filter-mapping>
|
||||
<filter-name>Clear security context filter</filter-name>
|
||||
@@ -231,11 +225,6 @@
|
||||
<url-pattern>/wcs/admin/*</url-pattern>
|
||||
</filter-mapping>
|
||||
|
||||
<filter-mapping>
|
||||
<filter-name>Security Headers Filter</filter-name>
|
||||
<url-pattern>/*</url-pattern>
|
||||
</filter-mapping>
|
||||
|
||||
<!-- Enterprise filter-mapping placeholder -->
|
||||
|
||||
<!-- Spring Context Loader listener - can disable loading of context if runtime config changes are needed -->
|
||||
|
@@ -57,8 +57,8 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<title>Alfresco</title>
|
||||
<link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/reset.css" />
|
||||
<link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/alfresco.css" />
|
||||
<link rel="stylesheet" type="text/css" href="./css/reset.css" />
|
||||
<link rel="stylesheet" type="text/css" href="./css/alfresco.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div class="sticky-wrapper">
|
||||
|
@@ -4,21 +4,21 @@
|
||||
%%
|
||||
Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
%%
|
||||
This file is part of the Alfresco software.
|
||||
If the software was purchased under a paid Alfresco license, the terms of
|
||||
the paid license agreement will prevail. Otherwise, the software is
|
||||
This file is part of the Alfresco software.
|
||||
If the software was purchased under a paid Alfresco license, the terms of
|
||||
the paid license agreement will prevail. Otherwise, the software is
|
||||
provided under the following open source license terms:
|
||||
|
||||
|
||||
Alfresco is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Lesser General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
|
||||
Alfresco is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Lesser General Public License for more details.
|
||||
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public License
|
||||
along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
#L%
|
||||
@@ -57,19 +57,19 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<title>Alfresco</title>
|
||||
<link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/reset.css" />
|
||||
<link rel="stylesheet" type="text/css" href="/<%=sysAdminParams.getAlfrescoContext()%>/css/alfresco.css" />
|
||||
<link rel="stylesheet" type="text/css" href="./css/reset.css" />
|
||||
<link rel="stylesheet" type="text/css" href="./css/alfresco.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div class="sticky-wrapper">
|
||||
<div class="index">
|
||||
|
||||
|
||||
<div class="title">
|
||||
<span class="logo"><a href="http://www.alfresco.com"><img src="./images/logo/logo.png" width="145" height="48" alt="" border="0" /></a></span>
|
||||
<span class="logo-separator"> </span>
|
||||
<h1>Welcome to Alfresco</h1>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="index-list">
|
||||
<h4><%=descriptorService.getServerDescriptor().getEdition()%> - <%=descriptorService.getServerDescriptor().getVersion()%></h4>
|
||||
<p></p>
|
||||
@@ -94,7 +94,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
|
||||
{
|
||||
%>
|
||||
<p>WARNING: The system is in Read Only mode, the License may have failed to deploy. Please visit the <a href="./s/enterprise/admin">Alfresco Administration Console</a> (admin only)</p>
|
||||
<%
|
||||
<%
|
||||
}
|
||||
if (descriptorService.getLicenseDescriptor() != null && descriptorService.getLicenseDescriptor().getLicenseMode().toString().equals("ENTERPRISE"))
|
||||
{
|
||||
@@ -120,7 +120,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
|
||||
<p><a href="./api/-default-/public/cmis/versions/1.1/atom">CMIS 1.1 AtomPub Service Document</a></p>
|
||||
<p><a href="./api/-default-/public/cmis/versions/1.1/browser">CMIS 1.1 Browser Binding URL</a></p>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
<div class="push"></div>
|
||||
</div>
|
||||
|
pom.xml (80 changed lines)
@@ -2,7 +2,7 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
<packaging>pom</packaging>
|
||||
<name>Alfresco Community Repo Parent</name>
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
<properties>
|
||||
<acs.version.major>7</acs.version.major>
|
||||
<acs.version.minor>0</acs.version.minor>
|
||||
<acs.version.revision>2</acs.version.revision>
|
||||
<acs.version.revision>0</acs.version.revision>
|
||||
<acs.version.label />
|
||||
|
||||
<version.edition>Community</version.edition>
|
||||
@@ -34,35 +34,35 @@
|
||||
<image.registry>quay.io</image.registry>
|
||||
|
||||
<java.version>11</java.version>
|
||||
<maven.compiler.source>${java.version}</maven.compiler.source>
|
||||
<maven.compiler.target>${java.version}</maven.compiler.target>
|
||||
<maven.build.sourceVersion>${java.version}</maven.build.sourceVersion>
|
||||
<maven.compiler.source>11</maven.compiler.source>
|
||||
<maven.compiler.target>11</maven.compiler.target>
|
||||
<maven.build.sourceVersion>11</maven.build.sourceVersion>
|
||||
|
||||
<dir.root>${project.build.directory}/alf_data</dir.root>
|
||||
|
||||
<dependency.alfresco-hb-data-sender.version>1.0.12</dependency.alfresco-hb-data-sender.version>
|
||||
<dependency.alfresco-mmt.version>6.0</dependency.alfresco-mmt.version>
|
||||
<dependency.alfresco-trashcan-cleaner.version>2.4.1</dependency.alfresco-trashcan-cleaner.version>
|
||||
<dependency.alfresco-trashcan-cleaner.version>2.3</dependency.alfresco-trashcan-cleaner.version>
|
||||
<dependency.alfresco-jlan.version>7.1</dependency.alfresco-jlan.version>
|
||||
<dependency.alfresco-server-root.version>6.0.1</dependency.alfresco-server-root.version>
|
||||
<dependency.alfresco-messaging-repo.version>1.2.15</dependency.alfresco-messaging-repo.version>
|
||||
<dependency.alfresco-log-sanitizer.version>0.2</dependency.alfresco-log-sanitizer.version>
|
||||
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
|
||||
<dependency.activiti.version>5.23.0</dependency.activiti.version>
|
||||
<dependency.transform.model.version>1.3.1</dependency.transform.model.version>
|
||||
<dependency.transform.model.version>1.0.2.12</dependency.transform.model.version>
|
||||
<dependency.alfresco-greenmail.version>6.2</dependency.alfresco-greenmail.version>
|
||||
<dependency.acs-event-model.version>0.0.12</dependency.acs-event-model.version>
|
||||
<dependency.acs-event-model.version>0.0.11</dependency.acs-event-model.version>
|
||||
|
||||
<dependency.spring.version>5.3.18</dependency.spring.version>
|
||||
<dependency.spring.version>5.3.3</dependency.spring.version>
|
||||
<dependency.antlr.version>3.5.2</dependency.antlr.version>
|
||||
<dependency.jackson.version>2.12.3</dependency.jackson.version>
|
||||
<dependency.jackson-databind.version>2.12.3</dependency.jackson-databind.version>
|
||||
<dependency.cxf.version>3.4.4</dependency.cxf.version>
|
||||
<dependency.jackson.version>2.12.1</dependency.jackson.version>
|
||||
<dependency.jackson-databind.version>${dependency.jackson.version}</dependency.jackson-databind.version>
|
||||
<dependency.cxf.version>3.4.2</dependency.cxf.version>
|
||||
<dependency.opencmis.version>1.0.0</dependency.opencmis.version>
|
||||
<dependency.webscripts.version>8.29</dependency.webscripts.version>
|
||||
<dependency.bouncycastle.version>1.69</dependency.bouncycastle.version>
|
||||
<dependency.mockito-core.version>3.9.0</dependency.mockito-core.version>
|
||||
<dependency.org-json.version>20210307</dependency.org-json.version>
|
||||
<dependency.webscripts.version>8.15</dependency.webscripts.version>
|
||||
<dependency.bouncycastle.version>1.68</dependency.bouncycastle.version>
|
||||
<dependency.mockito-core.version>3.7.7</dependency.mockito-core.version>
|
||||
<dependency.org-json.version>20201115</dependency.org-json.version>
|
||||
<dependency.commons-dbcp.version>1.4-DBCP330</dependency.commons-dbcp.version>
|
||||
<dependency.commons-io.version>2.8.0</dependency.commons-io.version>
|
||||
<dependency.gson.version>2.8.5</dependency.gson.version>
|
||||
@@ -73,18 +73,17 @@
|
||||
<dependency.slf4j.version>1.7.30</dependency.slf4j.version>
|
||||
<dependency.gytheio.version>0.12</dependency.gytheio.version>
|
||||
<dependency.groovy.version>2.5.9</dependency.groovy.version>
|
||||
<dependency.tika.version>1.26</dependency.tika.version>
|
||||
<dependency.spring-security.version>5.5.0</dependency.spring-security.version>
|
||||
<dependency.tika.version>1.25</dependency.tika.version>
|
||||
<dependency.spring-security.version>5.4.1</dependency.spring-security.version>
|
||||
<dependency.truezip.version>7.7.10</dependency.truezip.version>
|
||||
<dependency.poi.version>4.1.2</dependency.poi.version>
|
||||
<dependency.ooxml-schemas.version>1.4</dependency.ooxml-schemas.version>
|
||||
<dependency.keycloak.version>13.0.1</dependency.keycloak.version>
|
||||
<dependency.keycloak.version>11.0.0-alfresco-001</dependency.keycloak.version>
|
||||
<dependency.jboss.logging.version>3.4.1.Final</dependency.jboss.logging.version>
|
||||
<dependency.camel.version>3.7.4</dependency.camel.version>
|
||||
<dependency.camel.version>3.7.0</dependency.camel.version>
|
||||
<dependency.activemq.version>5.16.1</dependency.activemq.version>
|
||||
<dependency.apache-compress.version>1.21</dependency.apache-compress.version>
|
||||
<dependency.apache.taglibs.version>1.2.5</dependency.apache.taglibs.version>
|
||||
<dependency.awaitility.version>4.1.0</dependency.awaitility.version>
|
||||
<dependency.awaitility.version>4.0.3</dependency.awaitility.version>
|
||||
|
||||
<dependency.jakarta-jaxb-api.version>2.3.3</dependency.jakarta-jaxb-api.version>
|
||||
<dependency.jakarta-ws-api.version>2.3.3</dependency.jakarta-ws-api.version>
|
||||
@@ -97,16 +96,16 @@
|
||||
<dependency.jakarta-json-api.version>1.1.6</dependency.jakarta-json-api.version>
|
||||
<dependency.jakarta-rpc-api.version>1.1.4</dependency.jakarta-rpc-api.version>
|
||||
|
||||
<alfresco.googledrive.version>3.2.1.3</alfresco.googledrive.version>
|
||||
<alfresco.aos-module.version>1.4.0.1</alfresco.aos-module.version>
|
||||
<alfresco.googledrive.version>3.2.0</alfresco.googledrive.version>
|
||||
<alfresco.aos-module.version>1.4.0-M1</alfresco.aos-module.version>
|
||||
|
||||
<dependency.postgresql.version>42.2.20</dependency.postgresql.version>
|
||||
<dependency.mysql.version>8.0.25</dependency.mysql.version>
|
||||
<dependency.postgresql.version>42.2.18</dependency.postgresql.version>
|
||||
<dependency.mysql.version>8.0.23</dependency.mysql.version>
|
||||
<dependency.mariadb.version>2.7.2</dependency.mariadb.version>
|
||||
<dependency.tas-utility.version>3.0.44</dependency.tas-utility.version>
|
||||
<dependency.tas-utility.version>3.0.42</dependency.tas-utility.version>
|
||||
<dependency.rest-assured.version>3.3.0</dependency.rest-assured.version>
|
||||
<dependency.tas-restapi.version>1.58</dependency.tas-restapi.version>
|
||||
<dependency.tas-cmis.version>1.30</dependency.tas-cmis.version>
|
||||
<dependency.tas-restapi.version>1.52</dependency.tas-restapi.version>
|
||||
<dependency.tas-cmis.version>1.27</dependency.tas-cmis.version>
|
||||
<dependency.tas-email.version>1.8</dependency.tas-email.version>
|
||||
<dependency.tas-webdav.version>1.6</dependency.tas-webdav.version>
|
||||
<dependency.tas-ftp.version>1.5</dependency.tas-ftp.version>
|
||||
@@ -117,7 +116,7 @@
|
||||
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
|
||||
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
|
||||
<url>https://github.com/Alfresco/alfresco-community-repo</url>
|
||||
<tag>10.13</tag>
|
||||
<tag>repo-5439v2-c2</tag>
|
||||
</scm>
|
||||
|
||||
<distributionManagement>
|
||||
@@ -550,7 +549,8 @@
|
||||
<dependency>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
<version>${dependency.bouncycastle.version}</version>
|
||||
<version>1.68</version>
|
||||
<!-- <version>${dependency.bouncycastle.version}</version>-->
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
@@ -583,7 +583,7 @@
|
||||
<dependency>
|
||||
<groupId>com.drewnoakes</groupId>
|
||||
<artifactId>metadata-extractor</artifactId>
|
||||
<version>2.16.0</version>
|
||||
<version>2.15.0</version>
|
||||
</dependency>
|
||||
<!-- upgrade dependency from TIKA -->
|
||||
<dependency>
|
||||
@@ -595,13 +595,13 @@
|
||||
<dependency>
|
||||
<groupId>org.jsoup</groupId>
|
||||
<artifactId>jsoup</artifactId>
|
||||
<version>1.14.2</version>
|
||||
<version>1.13.1</version>
|
||||
</dependency>
|
||||
<!-- upgrade dependency from TIKA -->
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>${dependency.apache-compress.version}</version>
|
||||
<version>1.20</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
@@ -679,7 +679,7 @@
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>2.10.10</version>
|
||||
<version>2.10.9</version>
|
||||
</dependency>
|
||||
|
||||
<!-- provided dependencies -->
|
||||
@@ -694,7 +694,7 @@
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.13.2</version>
|
||||
<version>4.13</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -777,11 +777,6 @@
|
||||
<artifactId>camel-direct</artifactId>
|
||||
<version>${dependency.camel.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.camel</groupId>
|
||||
<artifactId>camel-management</artifactId>
|
||||
<version>${dependency.camel.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.camel</groupId>
|
||||
<artifactId>camel-mock</artifactId>
|
||||
@@ -814,7 +809,7 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>3.3.0</version>
|
||||
<version>3.2.0</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
@@ -829,4 +824,5 @@
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>10.13</version>
|
||||
<version>repo-5439v2-c2</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
|
@@ -1,61 +1,62 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.web.scripts.solr;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.FilterChain;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.httpclient.HttpClientFactory;
import org.alfresco.repo.web.filter.beans.DependencyInjectedFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;

/**
* This filter protects the solr callback urls by verifying a shared secret on the request header if
* the secureComms property is set to "secret". If it is set to "https", it will just verify
* that the request came in through a "secure" tomcat connector (but it will not validate the certificate
* on the request; this is done in a different filter).
*
* This filter protects the solr callback urls by verifying MACs on requests and encrypting responses
* and generating MACs on responses, if the secureComms property is set to "md5". If it is set to "https"
* or "none", the filter does nothing to the request and response.
*
* @since 4.0
*
*/
public class SOLRAuthenticationFilter implements DependencyInjectedFilter, InitializingBean
public class SOLRAuthenticationFilter implements DependencyInjectedFilter
{
public static enum SecureCommsType
{
HTTPS, SECRET, NONE;
HTTPS, NONE;

public static SecureCommsType getType(String type)
{
@@ -63,10 +64,6 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi
|
||||
{
|
||||
return HTTPS;
|
||||
}
|
||||
else if(type.equalsIgnoreCase("secret"))
|
||||
{
|
||||
return SECRET;
|
||||
}
|
||||
else if(type.equalsIgnoreCase("none"))
|
||||
{
|
||||
return NONE;
|
||||
@@ -82,11 +79,7 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi
|
||||
private static Log logger = LogFactory.getLog(SOLRAuthenticationFilter.class);
|
||||
|
||||
private SecureCommsType secureComms = SecureCommsType.HTTPS;
|
||||
|
||||
private String sharedSecret;
|
||||
|
||||
private String sharedSecretHeader = HttpClientFactory.DEFAULT_SHAREDSECRET_HEADER;
|
||||
|
||||
|
||||
public void setSecureComms(String type)
|
||||
{
|
||||
try
|
||||
@@ -99,33 +92,6 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi
|
||||
}
|
||||
}
|
||||
|
||||
public void setSharedSecret(String sharedSecret)
|
||||
{
|
||||
this.sharedSecret = sharedSecret;
|
||||
}
|
||||
|
||||
public void setSharedSecretHeader(String sharedSecretHeader)
|
||||
{
|
||||
this.sharedSecretHeader = sharedSecretHeader;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() throws Exception
|
||||
{
|
||||
if(secureComms == SecureCommsType.SECRET)
|
||||
{
|
||||
if(sharedSecret == null || sharedSecret.length()==0)
|
||||
{
|
||||
logger.fatal("Missing value for solr.sharedSecret configuration property. If solr.secureComms is set to \"secret\", a value for solr.sharedSecret is required. See https://docs.alfresco.com/search-services/latest/install/options/");
|
||||
throw new AlfrescoRuntimeException("Missing value for solr.sharedSecret configuration property");
|
||||
}
|
||||
if(sharedSecretHeader == null || sharedSecretHeader.length()==0)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Missing value for sharedSecretHeader");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void doFilter(ServletContext context, ServletRequest request,
|
||||
ServletResponse response, FilterChain chain) throws IOException,
|
||||
ServletException
|
||||
@@ -133,22 +99,52 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi
|
||||
HttpServletRequest httpRequest = (HttpServletRequest)request;
|
||||
HttpServletResponse httpResponse = (HttpServletResponse)response;
|
||||
|
||||
if(secureComms == SecureCommsType.SECRET)
|
||||
/* if(secureComms == SecureCommsType.ALFRESCO)
|
||||
{
|
||||
if(sharedSecret.equals(httpRequest.getHeader(sharedSecretHeader)))
|
||||
// Need to get as a byte array because we need to read the request twice, once for authentication
|
||||
// and again by the web service.
|
||||
SOLRHttpServletRequestWrapper requestWrapper = new SOLRHttpServletRequestWrapper(httpRequest, encryptionUtils);
|
||||
|
||||
if(logger.isDebugEnabled())
|
||||
{
|
||||
chain.doFilter(request, response);
|
||||
logger.debug("Authenticating " + httpRequest.getRequestURI());
|
||||
}
|
||||
|
||||
if(encryptionUtils.authenticate(httpRequest, requestWrapper.getDecryptedBody()))
|
||||
{
|
||||
try
|
||||
{
|
||||
OutputStream out = response.getOutputStream();
|
||||
|
||||
GenericResponseWrapper responseWrapper = new GenericResponseWrapper(httpResponse);
|
||||
|
||||
// TODO - do I need to chain to other authenticating filters - probably not?
|
||||
// Could also remove sending of credentials with http request
|
||||
chain.doFilter(requestWrapper, responseWrapper);
|
||||
|
||||
Pair<byte[], AlgorithmParameters> pair = encryptor.encrypt(KeyProvider.ALIAS_SOLR, null, responseWrapper.getData());
|
||||
|
||||
encryptionUtils.setResponseAuthentication(httpRequest, httpResponse, responseWrapper.getData(), pair.getSecond());
|
||||
|
||||
httpResponse.setHeader("Content-Length", Long.toString(pair.getFirst().length));
|
||||
out.write(pair.getFirst());
|
||||
out.close();
|
||||
}
|
||||
catch(Exception e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("", e);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN, "Authentication failure");
|
||||
httpResponse.setStatus(401);
|
||||
}
|
||||
}
|
||||
else if(secureComms == SecureCommsType.HTTPS)
|
||||
else */if(secureComms == SecureCommsType.HTTPS)
|
||||
{
|
||||
if(httpRequest.isSecure())
|
||||
{
|
||||
// https authentication; cert got verified in X509 filter
|
||||
// https authentication
|
||||
chain.doFilter(request, response);
|
||||
}
|
||||
else
|
||||
@@ -162,4 +158,128 @@ public class SOLRAuthenticationFilter implements DependencyInjectedFilter, Initi
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean validateTimestamp(String timestampStr)
|
||||
{
|
||||
if(timestampStr == null || timestampStr.equals(""))
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Missing timestamp on request");
|
||||
}
|
||||
long timestamp = -1;
|
||||
try
|
||||
{
|
||||
timestamp = Long.valueOf(timestampStr);
|
||||
}
|
||||
catch(NumberFormatException e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Invalid timestamp on request");
|
||||
}
|
||||
if(timestamp == -1)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Invalid timestamp on request");
|
||||
}
|
||||
long currentTime = System.currentTimeMillis();
|
||||
return ((currentTime - timestamp) < 30 * 1000); // 30s
|
||||
}
|
||||
|
||||
/* private static class SOLRHttpServletRequestWrapper extends HttpServletRequestWrapper
|
||||
{
|
||||
private byte[] body;
|
||||
|
||||
SOLRHttpServletRequestWrapper(HttpServletRequest req, EncryptionUtils encryptionUtils) throws IOException
|
||||
{
|
||||
super(req);
|
||||
this.body = encryptionUtils.decryptBody(req);
|
||||
}
|
||||
|
||||
byte[] getDecryptedBody()
|
||||
{
|
||||
return body;
|
||||
}
|
||||
|
||||
public ServletInputStream getInputStream()
|
||||
{
|
||||
final InputStream in = (body != null ? new ByteArrayInputStream(body) : null);
|
||||
return new ServletInputStream()
|
||||
{
|
||||
public int read() throws IOException
|
||||
{
|
||||
if(in == null)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
int i = in.read();
|
||||
if(i == -1)
|
||||
{
|
||||
in.close();
|
||||
}
|
||||
return i;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}*/
|
||||
|
||||
private static class ByteArrayServletOutputStream extends ServletOutputStream
|
||||
{
|
||||
private ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
|
||||
ByteArrayServletOutputStream()
|
||||
{
|
||||
}
|
||||
|
||||
public byte[] getData()
|
||||
{
|
||||
return out.toByteArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(int b) throws IOException
|
||||
{
|
||||
out.write(b);
|
||||
}
|
||||
}
|
||||
|
||||
public static class GenericResponseWrapper extends HttpServletResponseWrapper {
|
||||
private ByteArrayServletOutputStream output;
|
||||
private int contentLength;
|
||||
private String contentType;
|
||||
|
||||
public GenericResponseWrapper(HttpServletResponse response) {
|
||||
super(response);
|
||||
output = new ByteArrayServletOutputStream();
|
||||
}
|
||||
|
||||
public byte[] getData() {
|
||||
return output.getData();
|
||||
}
|
||||
|
||||
public ServletOutputStream getOutputStream() {
|
||||
return output;
|
||||
}
|
||||
|
||||
public PrintWriter getWriter() {
|
||||
return new PrintWriter(getOutputStream(),true);
|
||||
}
|
||||
|
||||
public void setContentLength(int length) {
|
||||
this.contentLength = length;
|
||||
super.setContentLength(length);
|
||||
}
|
||||
|
||||
public int getContentLength() {
|
||||
return contentLength;
|
||||
}
|
||||
|
||||
public void setContentType(String type) {
|
||||
this.contentType = type;
|
||||
super.setContentType(type);
|
||||
}
|
||||
|
||||
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
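The javadoc and the removed doFilter branch above describe the shared-secret mode: the filter compares a secret configured on the server with a value sent in an agreed request header, and rejects the callback otherwise. The sketch below is illustrative only and is not the repository's filter; the class name SimpleSharedSecretFilter and the constructor-injected configuration are assumptions made for the example.

import java.io.IOException;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical minimal filter (not the Alfresco implementation): lets a request through only
// when the configured shared secret matches the value carried in the agreed header.
public class SimpleSharedSecretFilter
{
    private final String sharedSecret;
    private final String sharedSecretHeader;

    public SimpleSharedSecretFilter(String sharedSecret, String sharedSecretHeader)
    {
        this.sharedSecret = sharedSecret;
        this.sharedSecretHeader = sharedSecretHeader;
    }

    public void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
            throws IOException, ServletException
    {
        if (sharedSecret != null && sharedSecret.equals(request.getHeader(sharedSecretHeader)))
        {
            // Secret matches: pass the request on to the protected callback
            chain.doFilter(request, response);
        }
        else
        {
            // Secret missing or wrong: fail the request, mirroring the 403 sent by the filter above
            response.sendError(HttpServletResponse.SC_FORBIDDEN, "Authentication failure");
        }
    }
}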
@@ -716,7 +716,7 @@ public abstract class BaseSSOAuthenticationFilter extends BaseAuthenticationFilt
|
||||
}
|
||||
else
|
||||
{
|
||||
if((pathInfo.length() > 5 && !pathInfo.substring(0, 6).toLowerCase().equals("/cmis/")) && !pathInfo.equals("/discovery"))
|
||||
if(!pathInfo.substring(0, 6).toLowerCase().equals("/cmis/") && !pathInfo.equals("/discovery"))
|
||||
{
|
||||
// remove tenant
|
||||
int idx = pathInfo.indexOf('/', 1);
|
||||
|
@@ -30,22 +30,8 @@ import org.alfresco.rest.api.model.Aspect;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
|
||||
/**
|
||||
* Aspect API
|
||||
*/
|
||||
public interface Aspects
|
||||
{
|
||||
/**
|
||||
* Lists aspects
|
||||
* @param params
|
||||
* @return Collection of aspects
|
||||
*/
|
||||
CollectionWithPagingInfo<Aspect> listAspects(Parameters params);
|
||||
|
||||
/**
|
||||
* Gets an aspect by id
|
||||
* @param aspectId
|
||||
* @return an aspect
|
||||
*/
|
||||
Aspect getAspect(String aspectId);
|
||||
Aspect getAspectById(String aspectId);
|
||||
}
|
||||
|
@@ -30,24 +30,8 @@ import org.alfresco.rest.api.model.Type;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
|
||||
/**
|
||||
* Types API
|
||||
*/
|
||||
public interface Types
|
||||
{
|
||||
/**
|
||||
* Lists types
|
||||
*
|
||||
* @param params
|
||||
* @return Collection of types
|
||||
*/
|
||||
CollectionWithPagingInfo<Type> listTypes(Parameters params);
|
||||
|
||||
/**
|
||||
* Gets a type by id
|
||||
*
|
||||
* @param typeId
|
||||
* @return type
|
||||
*/
|
||||
Type getType(String typeId);
|
||||
Type getType(String typeId);
|
||||
}
|
||||
|
@@ -61,6 +61,6 @@ public class AspectEntityResource implements EntityResourceAction.ReadById<Aspec
|
||||
@Override
|
||||
public Aspect readById(String id, Parameters parameters)
|
||||
{
|
||||
return aspects.getAspect(id);
|
||||
return aspects.getAspectById(id);
|
||||
}
|
||||
}
|
||||
|
@@ -26,25 +26,15 @@
|
||||
|
||||
package org.alfresco.rest.api.impl;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.alfresco.rest.api.ClassDefinitionMapper;
|
||||
import org.alfresco.rest.api.model.AssociationSource;
|
||||
import org.alfresco.rest.api.model.Association;
|
||||
import org.alfresco.rest.api.model.AbstractClass;
|
||||
import org.alfresco.rest.api.model.PropertyDefinition;
|
||||
import org.alfresco.rest.api.model.ClassDefinition;
|
||||
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Paging;
|
||||
import org.alfresco.rest.framework.resource.parameters.where.Query;
|
||||
import org.alfresco.rest.framework.resource.parameters.where.QueryHelper;
|
||||
import org.alfresco.rest.workflow.api.impl.MapBasedQueryWalker;
|
||||
import org.alfresco.service.cmr.dictionary.AssociationDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||
import org.alfresco.service.namespace.NamespacePrefixResolver;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.util.Arrays;
|
||||
@@ -52,36 +42,12 @@ import java.util.List;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.Map;
|
||||
import java.util.Collection;
|
||||
import java.util.ArrayList;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class AbstractClassImpl<T extends AbstractClass> {
|
||||
static String PARAM_MODEL_IDS = "modelId";
|
||||
static String PARAM_PARENT_IDS = "parentId";
|
||||
static String PARAM_MODEL_IDS = "modelIds";
|
||||
static String PARAM_PARENT_IDS = "parentIds";
|
||||
static String PARAM_NAMESPACE_URI = "namespaceUri";
|
||||
static String PARAM_INCLUDE_SUBASPECTS = "INCLUDESUBASPECTS";
|
||||
static String PARAM_INCLUDE_SUBTYPES = "INCLUDESUBTYPES";
|
||||
static String PARAM_INCLUDE_PROPERTIES = "properties";
|
||||
static String PARAM_INCLUDE_MANDATORY_ASPECTS = "mandatoryAspects";
|
||||
static String PARAM_INCLUDE_ASSOCIATIONS = "associations";
|
||||
static List<String> ALL_PROPERTIES = ImmutableList.of(PARAM_INCLUDE_PROPERTIES, PARAM_INCLUDE_MANDATORY_ASPECTS, PARAM_INCLUDE_ASSOCIATIONS);
|
||||
|
||||
private DictionaryService dictionaryService;
|
||||
private NamespacePrefixResolver namespaceService;
|
||||
private ClassDefinitionMapper classDefinitionMapper;
|
||||
|
||||
AbstractClassImpl(DictionaryService dictionaryService, NamespacePrefixResolver namespaceService, ClassDefinitionMapper classDefinitionMapper)
|
||||
{
|
||||
this.dictionaryService = dictionaryService;
|
||||
this.namespaceService = namespaceService;
|
||||
this.classDefinitionMapper = classDefinitionMapper;
|
||||
}
|
||||
|
||||
public CollectionWithPagingInfo<T> createPagedResult(List<T> list, Paging paging)
|
||||
{
|
||||
@@ -129,13 +95,7 @@ public class AbstractClassImpl<T extends AbstractClass> {
|
||||
{
|
||||
ClassQueryWalker propertyWalker = new ClassQueryWalker();
|
||||
QueryHelper.walk(queryParameters, propertyWalker);
|
||||
|
||||
return ModelApiFilter.builder()
|
||||
.withModelId(propertyWalker.getModelIds())
|
||||
.withParentIds(propertyWalker.getParentIds())
|
||||
.withMatchPrefix(propertyWalker.getMatchedPrefix())
|
||||
.withNotMatchPrefix(propertyWalker.getNotMatchedPrefix())
|
||||
.build();
|
||||
return new ModelApiFilter(propertyWalker.getModelIds(), propertyWalker.getParentIds(), propertyWalker.getMatchedPrefix(), propertyWalker.getNotMatchedPrefix());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -148,137 +108,13 @@ public class AbstractClassImpl<T extends AbstractClass> {
|
||||
}
|
||||
|
||||
listParam.stream()
|
||||
.filter(StringUtils::isBlank)
|
||||
.filter(String::isEmpty)
|
||||
.findAny()
|
||||
.ifPresent(qName -> {
|
||||
throw new IllegalArgumentException(StringUtils.capitalize(paramName) + " cannot be empty (i.e. '')");
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
protected Set<Pair<QName,Boolean>> parseModelIds(Set<String> modelIds, String apiSuffix)
|
||||
{
|
||||
return modelIds.stream().map(modelId ->
|
||||
{
|
||||
QName qName = null;
|
||||
boolean filterIncludeSubClass = false;
|
||||
|
||||
int idx = modelId.lastIndexOf(' ');
|
||||
if (idx > 0)
|
||||
{
|
||||
String suffix = modelId.substring(idx);
|
||||
if (suffix.equalsIgnoreCase(" " + apiSuffix))
|
||||
{
|
||||
filterIncludeSubClass = true;
|
||||
modelId = modelId.substring(0, idx);
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
qName = QName.createQName(modelId, this.namespaceService);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
throw new InvalidArgumentException(modelId + " isn't a valid QName. " + ex.getMessage());
|
||||
}
|
||||
|
||||
if (qName == null)
|
||||
throw new InvalidArgumentException(modelId + " isn't a valid QName. ");
|
||||
|
||||
return new Pair<>(qName, filterIncludeSubClass);
|
||||
}).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
|
||||
public T constructFromFilters(T abstractClass, org.alfresco.service.cmr.dictionary.ClassDefinition classDefinition, List<String> includes) {
|
||||
|
||||
if (includes != null && includes.contains(PARAM_INCLUDE_PROPERTIES))
|
||||
{
|
||||
List<PropertyDefinition> properties = Collections.emptyList();
|
||||
ClassDefinition _classDefinition = this.classDefinitionMapper.fromDictionaryClassDefinition(classDefinition, dictionaryService);
|
||||
if (_classDefinition.getProperties() != null)
|
||||
{
|
||||
properties = _classDefinition.getProperties();
|
||||
}
|
||||
abstractClass.setProperties(properties);
|
||||
}
|
||||
|
||||
if (includes != null && includes.contains(PARAM_INCLUDE_ASSOCIATIONS))
|
||||
{
|
||||
List<Association> associations = getAssociations(classDefinition.getAssociations());
|
||||
abstractClass.setAssociations(associations);
|
||||
}
|
||||
|
||||
if (includes != null && includes.contains(PARAM_INCLUDE_MANDATORY_ASPECTS))
|
||||
{
|
||||
if (classDefinition.getDefaultAspectNames() != null)
|
||||
{
|
||||
List<String> aspects = classDefinition.getDefaultAspectNames().stream().map(QName::toPrefixString).collect(Collectors.toList());
|
||||
abstractClass.setMandatoryAspects(aspects);
|
||||
}
|
||||
}
|
||||
|
||||
abstractClass.setIsContainer(classDefinition.isContainer());
|
||||
abstractClass.setIsArchive(classDefinition.getArchive());
|
||||
abstractClass.setIncludedInSupertypeQuery(classDefinition.getIncludedInSuperTypeQuery());
|
||||
return abstractClass;
|
||||
}
|
||||
|
||||
List<Association> getAssociations(Map<QName, AssociationDefinition> associationDefinitionMap)
|
||||
{
|
||||
Collection<AssociationDefinition> associationDefinitions = associationDefinitionMap.values();
|
||||
|
||||
if (associationDefinitions.size() == 0)
|
||||
return Collections.emptyList();
|
||||
|
||||
List<Association> associations = new ArrayList<Association>();
|
||||
|
||||
for (AssociationDefinition definition : associationDefinitions)
|
||||
{
|
||||
Association association = new Association();
|
||||
|
||||
association.setId(definition.getName().toPrefixString());
|
||||
association.setTitle(definition.getTitle());
|
||||
association.setDescription(definition.getDescription());
|
||||
association.setIsChild(definition.isChild());
|
||||
association.setIsProtected(definition.isProtected());
|
||||
|
||||
AssociationSource source = new AssociationSource();
|
||||
|
||||
String sourceRole = definition.getSourceRoleName() != null ? definition.getSourceRoleName().toPrefixString() : null;
|
||||
source.setRole(sourceRole);
|
||||
|
||||
String sourceClass = definition.getSourceClass() != null ? definition.getSourceClass().getName().toPrefixString() : null;
|
||||
source.setCls(sourceClass);
|
||||
|
||||
source.setIsMany(definition.isSourceMany());
|
||||
source.setIsMandatory(definition.isSourceMandatory());
|
||||
|
||||
AssociationSource target = new AssociationSource();
|
||||
String targetRole = definition.getTargetRoleName() != null ? definition.getTargetRoleName().toPrefixString() : null;
|
||||
target.setRole(targetRole);
|
||||
|
||||
String targetClass = definition.getTargetClass() != null ? definition.getTargetClass().getName().toPrefixString() : null;
|
||||
target.setCls(targetClass);
|
||||
|
||||
target.setIsMany(definition.isTargetMany());
|
||||
target.setIsMandatory(definition.isTargetMandatory());
|
||||
target.setIsMandatoryEnforced(definition.isTargetMandatoryEnforced());
|
||||
|
||||
association.setSource(source);
|
||||
association.setTarget(target);
|
||||
associations.add(association);
|
||||
}
|
||||
|
||||
return associations;
|
||||
}
|
||||
|
||||
public static <T> Predicate<T> distinctByKey(Function<? super T, ?> keyExtractor) {
|
||||
Map<Object, Boolean> seen = new ConcurrentHashMap<>();
|
||||
return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
|
||||
}
|
||||
|
||||
public static class ClassQueryWalker extends MapBasedQueryWalker
|
||||
{
|
||||
private Set<String> modelIds = null;
|
||||
@@ -351,8 +187,12 @@ public class AbstractClassImpl<T extends AbstractClass> {
|
||||
private String matchedPrefix;
|
||||
private String notMatchedPrefix;
|
||||
|
||||
public ModelApiFilter()
|
||||
public ModelApiFilter(Set<String> modelIds, Set<String> parentIds, String matchedPrefix, String notMatchedPrefix)
|
||||
{
|
||||
this.modelIds = modelIds;
|
||||
this.parentIds = parentIds;
|
||||
this.matchedPrefix = matchedPrefix;
|
||||
this.notMatchedPrefix = notMatchedPrefix;
|
||||
}
|
||||
|
||||
public Set<String> getModelIds()
|
||||
@@ -374,52 +214,5 @@ public class AbstractClassImpl<T extends AbstractClass> {
|
||||
{
|
||||
return parentIds;
|
||||
}
|
||||
|
||||
public static ModelApiFilterBuilder builder()
|
||||
{
|
||||
return new ModelApiFilterBuilder();
|
||||
}
|
||||
|
||||
public static class ModelApiFilterBuilder
|
||||
{
|
||||
private Set<String> modelIds;
|
||||
private Set<String> parentIds;
|
||||
private String matchedPrefix;
|
||||
private String notMatchedPrefix;
|
||||
|
||||
public ModelApiFilterBuilder withModelId(Set<String> modelIds)
|
||||
{
|
||||
this.modelIds = modelIds;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ModelApiFilterBuilder withParentIds(Set<String> parentIds)
|
||||
{
|
||||
this.parentIds = parentIds;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ModelApiFilterBuilder withMatchPrefix(String matchedPrefix)
|
||||
{
|
||||
this.matchedPrefix = matchedPrefix;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ModelApiFilterBuilder withNotMatchPrefix(String notMatchedPrefix)
|
||||
{
|
||||
this.notMatchedPrefix = notMatchedPrefix;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ModelApiFilter build()
|
||||
{
|
||||
ModelApiFilter modelApiFilter = new ModelApiFilter();
|
||||
modelApiFilter.modelIds = modelIds;
|
||||
modelApiFilter.parentIds = parentIds;
|
||||
modelApiFilter.matchedPrefix = matchedPrefix;
|
||||
modelApiFilter.notMatchedPrefix = notMatchedPrefix;
|
||||
return modelApiFilter;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
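The distinctByKey helper shown in the diff above builds a stateful predicate over a ConcurrentHashMap, so a stream keeps only the first element seen for each derived key. A small, self-contained usage sketch follows; the class name and sample values are made up for illustration.

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class DistinctByKeyExample
{
    // Same idiom as the helper in the diff: remember keys already seen and accept only new ones
    static <T> Predicate<T> distinctByKey(Function<? super T, ?> keyExtractor)
    {
        Map<Object, Boolean> seen = new ConcurrentHashMap<>();
        return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
    }

    public static void main(String[] args)
    {
        // Keeps the first entry for each prefix: "cm:title" and "cm:name" share the key "cm"
        List<String> distinctByPrefix = Stream.of("cm:title", "cm:name", "exif:exposureTime")
                .filter(distinctByKey(name -> name.split(":")[0]))
                .collect(Collectors.toList());
        System.out.println(distinctByPrefix); // [cm:title, exif:exposureTime]
    }
}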
@@ -26,10 +26,10 @@
|
||||
|
||||
package org.alfresco.rest.api.impl;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.rest.api.Aspects;
|
||||
import org.alfresco.rest.api.ClassDefinitionMapper;
|
||||
import org.alfresco.rest.api.model.Aspect;
|
||||
import org.alfresco.rest.api.model.PropertyDefinition;
|
||||
import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
@@ -41,12 +41,10 @@ import org.alfresco.service.cmr.dictionary.ModelDefinition;
|
||||
import org.alfresco.service.namespace.NamespaceException;
|
||||
import org.alfresco.service.namespace.NamespacePrefixResolver;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Collection;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@@ -78,44 +76,37 @@ public class AspectsImpl extends AbstractClassImpl<Aspect> implements Aspects
|
||||
PropertyCheck.mandatory(this, "classDefinitionMapper", classDefinitionMapper);
|
||||
}
|
||||
|
||||
AspectsImpl(DictionaryService dictionaryService, NamespacePrefixResolver namespaceService, ClassDefinitionMapper classDefinitionMapper)
|
||||
{
|
||||
super(dictionaryService, namespaceService, classDefinitionMapper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionWithPagingInfo<Aspect> listAspects(Parameters params)
|
||||
{
|
||||
Paging paging = params.getPaging();
|
||||
ModelApiFilter query = getQuery(params.getQuery());
|
||||
Stream<QName> aspectStream = null;
|
||||
Stream<QName> aspectList = null;
|
||||
|
||||
if (query != null && query.getModelIds() != null)
|
||||
{
|
||||
validateListParam(query.getModelIds(), PARAM_MODEL_IDS);
|
||||
Set<Pair<QName, Boolean>> modelsFilter = parseModelIds(query.getModelIds(), PARAM_INCLUDE_SUBASPECTS);
|
||||
aspectStream = modelsFilter.stream().map(this::getModelAspects).flatMap(Collection::stream);
|
||||
aspectList = query.getModelIds().parallelStream().map(this::getModelAspects).flatMap(Collection::parallelStream);
|
||||
}
|
||||
else if (query != null && query.getParentIds() != null)
|
||||
{
|
||||
validateListParam(query.getParentIds(), PARAM_PARENT_IDS);
|
||||
aspectStream = query.getParentIds().stream().map(this::getChildAspects).flatMap(Collection::stream);
|
||||
aspectList = query.getParentIds().parallelStream().map(this::getChildAspects).flatMap(Collection::parallelStream);
|
||||
}
|
||||
else
|
||||
{
|
||||
aspectStream = this.dictionaryService.getAllAspects().stream();
|
||||
aspectList = this.dictionaryService.getAllAspects().parallelStream();
|
||||
}
|
||||
|
||||
List<Aspect> allAspects = aspectStream.filter((qName) -> filterByNamespace(query, qName))
|
||||
.filter(distinctByKey(QName::getPrefixString))
|
||||
.map((qName) -> this.convertToAspect(dictionaryService.getAspect(qName), params.getInclude()))
|
||||
List<Aspect> allAspects = aspectList.filter((qName) -> filterByNamespace(query, qName))
|
||||
.map((qName) -> this.convertToAspect(dictionaryService.getAspect(qName)))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return createPagedResult(allAspects, paging);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Aspect getAspect(String aspectId)
|
||||
public Aspect getAspectById(String aspectId)
|
||||
{
|
||||
if (aspectId == null)
|
||||
throw new InvalidArgumentException("Invalid parameter: unknown scheme specified");
|
||||
@@ -134,50 +125,32 @@ public class AspectsImpl extends AbstractClassImpl<Aspect> implements Aspects
|
||||
if (aspectDefinition == null)
|
||||
throw new EntityNotFoundException(aspectId);
|
||||
|
||||
return this.convertToAspect(aspectDefinition, ALL_PROPERTIES);
|
||||
return this.convertToAspect(aspectDefinition);
|
||||
}
|
||||
|
||||
public Aspect convertToAspect(AspectDefinition aspectDefinition, List<String> includes)
|
||||
public Aspect convertToAspect(AspectDefinition aspectDefinition)
|
||||
{
|
||||
try
|
||||
{
|
||||
Aspect aspect = new Aspect(aspectDefinition, dictionaryService);
|
||||
constructFromFilters(aspect, aspectDefinition, includes);
|
||||
return aspect;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Failed to parse Aspect: " + aspectDefinition.getName() + " . " + ex.getMessage());
|
||||
}
|
||||
List<PropertyDefinition> properties = this.classDefinitionMapper.fromDictionaryClassDefinition(aspectDefinition, dictionaryService).getProperties();
|
||||
return new Aspect(aspectDefinition, dictionaryService, properties);
|
||||
}
|
||||
|
||||
private Collection<QName> getModelAspects(Pair<QName,Boolean> model)
|
||||
private Collection<QName> getModelAspects(String modelId)
|
||||
{
|
||||
ModelDefinition modelDefinition = null;
|
||||
|
||||
if (modelId == null)
|
||||
throw new InvalidArgumentException("modelId is null");
|
||||
|
||||
try
|
||||
{
|
||||
modelDefinition = this.dictionaryService.getModel(model.getFirst());
|
||||
modelDefinition = this.dictionaryService.getModel(QName.createQName(modelId, this.namespaceService));
|
||||
}
|
||||
catch (Exception exception)
|
||||
catch (NamespaceException exception)
|
||||
{
|
||||
throw new InvalidArgumentException(exception.getMessage());
|
||||
}
|
||||
|
||||
if (modelDefinition == null)
|
||||
throw new EntityNotFoundException("model");
|
||||
|
||||
Collection<QName> aspects = this.dictionaryService.getAspects(modelDefinition.getName());
|
||||
|
||||
if (!model.getSecond()) // look for model aspects alone
|
||||
return aspects;
|
||||
|
||||
Stream<QName> aspectStream = aspects.stream();
|
||||
Stream<QName> childrenStream = aspects.stream()
|
||||
.map(aspect -> this.dictionaryService.getSubAspects(aspect, false))
|
||||
.flatMap(Collection::stream);
|
||||
|
||||
return Stream.concat(aspectStream, childrenStream).collect(Collectors.toList());
|
||||
return this.dictionaryService.getAspects(modelDefinition.getName());
|
||||
}
|
||||
|
||||
private Collection<QName> getChildAspects(String aspectId)
|
||||
|
@@ -26,10 +26,10 @@
|
||||
|
||||
package org.alfresco.rest.api.impl;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.rest.api.ClassDefinitionMapper;
|
||||
import org.alfresco.rest.api.Types;
|
||||
import org.alfresco.rest.api.ClassDefinitionMapper;
|
||||
import org.alfresco.rest.api.model.Type;
|
||||
import org.alfresco.rest.api.model.PropertyDefinition;
|
||||
import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
@@ -41,12 +41,10 @@ import org.alfresco.service.cmr.dictionary.TypeDefinition;
|
||||
import org.alfresco.service.namespace.NamespaceException;
|
||||
import org.alfresco.service.namespace.NamespacePrefixResolver;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Collection;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@@ -78,40 +76,32 @@ public class TypesImpl extends AbstractClassImpl<Type> implements Types
|
||||
PropertyCheck.mandatory(this, "classDefinitionMapper", classDefinitionMapper);
|
||||
}
|
||||
|
||||
TypesImpl(DictionaryService dictionaryService, NamespacePrefixResolver namespaceService, ClassDefinitionMapper classDefinitionMapper)
|
||||
{
|
||||
super(dictionaryService, namespaceService, classDefinitionMapper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionWithPagingInfo<Type> listTypes(Parameters params)
|
||||
{
|
||||
Paging paging = params.getPaging();
|
||||
ModelApiFilter query = getQuery(params.getQuery());
|
||||
Stream<QName> typeStream = null;
|
||||
Stream<QName> typeList = null;
|
||||
|
||||
if (query != null && query.getModelIds() != null)
|
||||
{
|
||||
validateListParam(query.getModelIds(), PARAM_MODEL_IDS);
|
||||
Set<Pair<QName, Boolean>> modelsFilter = parseModelIds(query.getModelIds(), PARAM_INCLUDE_SUBTYPES);
|
||||
typeStream = modelsFilter.stream().map(this::getModelTypes).flatMap(Collection::stream);
|
||||
typeList = query.getModelIds().parallelStream().map(this::getModelTypes).flatMap(Collection::parallelStream);
|
||||
}
|
||||
else if (query != null && query.getParentIds() != null)
|
||||
{
|
||||
validateListParam(query.getParentIds(), PARAM_PARENT_IDS);
|
||||
typeStream = query.getParentIds().stream().map(this::getChildTypes).flatMap(Collection::stream);
|
||||
typeList = query.getParentIds().parallelStream().map(this::getChildTypes).flatMap(Collection::parallelStream);
|
||||
}
|
||||
else
|
||||
{
|
||||
typeStream = this.dictionaryService.getAllTypes().stream();
|
||||
typeList = this.dictionaryService.getAllTypes().parallelStream();
|
||||
}
|
||||
|
||||
List<Type> allTypes = typeStream
|
||||
.filter((qName) -> filterByNamespace(query, qName))
|
||||
.filter(distinctByKey(QName::getPrefixString))
|
||||
.map((qName) -> this.convertToType(dictionaryService.getType(qName), params.getInclude()))
|
||||
List<Type> allTypes = typeList.filter((qName) -> filterByNamespace(query, qName))
|
||||
.map((qName) -> this.convertToType(dictionaryService.getType(qName)))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return createPagedResult(allTypes, paging);
|
||||
}
|
||||
|
||||
@@ -135,50 +125,32 @@ public class TypesImpl extends AbstractClassImpl<Type> implements Types
|
||||
if (typeDefinition == null)
|
||||
throw new EntityNotFoundException(typeId);
|
||||
|
||||
return this.convertToType(typeDefinition, ALL_PROPERTIES);
|
||||
return this.convertToType(typeDefinition);
|
||||
}
|
||||
|
||||
public Type convertToType(TypeDefinition typeDefinition, List<String> includes)
|
||||
public Type convertToType(TypeDefinition typeDefinition)
|
||||
{
|
||||
try
|
||||
{
|
||||
Type type = new Type(typeDefinition, dictionaryService);
|
||||
constructFromFilters(type, typeDefinition, includes);
|
||||
return type;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Failed to parse Type: " + typeDefinition.getName() + " . " + ex.getMessage());
|
||||
}
|
||||
List<PropertyDefinition> properties = this.classDefinitionMapper.fromDictionaryClassDefinition(typeDefinition, dictionaryService).getProperties();
|
||||
return new Type(typeDefinition, dictionaryService, properties);
|
||||
}
|
||||
|
||||
private Collection<QName> getModelTypes(Pair<QName,Boolean> model)
|
||||
private Collection<QName> getModelTypes(String modelId)
|
||||
{
|
||||
ModelDefinition modelDefinition = null;
|
||||
|
||||
if (modelId == null)
|
||||
throw new InvalidArgumentException("modelId is null");
|
||||
|
||||
try
|
||||
{
|
||||
modelDefinition = this.dictionaryService.getModel(model.getFirst());
|
||||
modelDefinition = this.dictionaryService.getModel(QName.createQName(modelId, this.namespaceService));
|
||||
}
|
||||
catch (Exception exception)
|
||||
catch (NamespaceException exception)
|
||||
{
|
||||
throw new InvalidArgumentException(exception.getMessage());
|
||||
}
|
||||
|
||||
if (modelDefinition == null)
|
||||
throw new EntityNotFoundException("model");
|
||||
|
||||
Collection<QName> aspects = this.dictionaryService.getTypes(modelDefinition.getName());
|
||||
|
||||
if (!model.getSecond()) //look for model types alone
|
||||
return aspects;
|
||||
|
||||
Stream<QName> aspectStream = aspects.stream();
|
||||
Stream<QName> childrenStream = aspects.stream()
|
||||
.map(aspect -> this.dictionaryService.getSubTypes(aspect, false))
|
||||
.flatMap(Collection::stream);
|
||||
|
||||
return Stream.concat(aspectStream, childrenStream).collect(Collectors.toList());
|
||||
return this.dictionaryService.getTypes(modelDefinition.getName());
|
||||
}
|
||||
|
||||
private Collection<QName> getChildTypes(String typeId)
|
||||
|
@@ -26,26 +26,18 @@
|
||||
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
import org.alfresco.service.cmr.dictionary.ModelDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.NamespaceDefinition;
|
||||
import org.alfresco.service.cmr.i18n.MessageLookup;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public abstract class AbstractClass extends ClassDefinition implements Comparable<AbstractClass>
|
||||
{
|
||||
protected String id;
|
||||
protected String title;
|
||||
protected String description;
|
||||
protected String parentId;
|
||||
protected Boolean isContainer = null;
|
||||
protected Boolean isArchive = null;
|
||||
protected Boolean includedInSupertypeQuery = null;
|
||||
protected List<String> mandatoryAspects = null;
|
||||
protected List<Association> associations = null;
|
||||
protected Model model;
|
||||
String id;
|
||||
String title;
|
||||
String description;
|
||||
String parentId;
|
||||
|
||||
public String getId()
|
||||
{
|
||||
@@ -87,64 +79,13 @@ public abstract class AbstractClass extends ClassDefinition implements Comparabl
|
||||
this.parentId = parentId;
|
||||
}
|
||||
|
||||
public Model getModel()
|
||||
<T> List<T> setList(List<T> sourceList)
|
||||
{
|
||||
return model;
|
||||
}
|
||||
|
||||
public void setModel(Model model)
|
||||
{
|
||||
this.model = model;
|
||||
}
|
||||
|
||||
public Boolean getIsContainer()
|
||||
{
|
||||
return isContainer;
|
||||
}
|
||||
|
||||
public void setIsContainer(Boolean isContainer)
|
||||
{
|
||||
this.isContainer = isContainer;
|
||||
}
|
||||
|
||||
public Boolean getIsArchive()
|
||||
{
|
||||
return isArchive;
|
||||
}
|
||||
|
||||
public void setIsArchive(Boolean isArchive)
|
||||
{
|
||||
this.isArchive = isArchive;
|
||||
}
|
||||
|
||||
public Boolean getIncludedInSupertypeQuery()
|
||||
{
|
||||
return includedInSupertypeQuery;
|
||||
}
|
||||
|
||||
public void setIncludedInSupertypeQuery(Boolean includedInSupertypeQuery)
|
||||
{
|
||||
this.includedInSupertypeQuery = includedInSupertypeQuery;
|
||||
}
|
||||
|
||||
public List<String> getMandatoryAspects()
|
||||
{
|
||||
return mandatoryAspects;
|
||||
}
|
||||
|
||||
public void setMandatoryAspects(List<String> mandatoryAspects)
|
||||
{
|
||||
this.mandatoryAspects = mandatoryAspects;
|
||||
}
|
||||
|
||||
public List<Association> getAssociations()
|
||||
{
|
||||
return associations;
|
||||
}
|
||||
|
||||
public void setAssociations(List<Association> associations)
|
||||
{
|
||||
this.associations = associations;
|
||||
if (sourceList == null)
|
||||
{
|
||||
return Collections.<T> emptyList();
|
||||
}
|
||||
return new ArrayList<>(sourceList);
|
||||
}
|
||||
|
||||
String getParentNameAsString(QName parentQName)
|
||||
@@ -156,27 +97,13 @@ public abstract class AbstractClass extends ClassDefinition implements Comparabl
|
||||
return null;
|
||||
}
|
||||
|
||||
Model getModelInfo(org.alfresco.service.cmr.dictionary.ClassDefinition classDefinition, MessageLookup messageLookup)
|
||||
{
|
||||
final ModelDefinition modelDefinition = classDefinition.getModel();
|
||||
final String prefix = classDefinition.getName().toPrefixString().split(":")[0];
|
||||
|
||||
final NamespaceDefinition namespaceDefinition = modelDefinition.getNamespaces().stream()
|
||||
.filter(definition -> definition.getPrefix().equals(prefix))
|
||||
.findFirst()
|
||||
.get();
|
||||
|
||||
final String modelId = modelDefinition.getName().toPrefixString();
|
||||
final String author = modelDefinition.getAuthor();
|
||||
final String description = modelDefinition.getDescription(messageLookup);
|
||||
|
||||
return new Model(modelId, author, description, namespaceDefinition.getUri(), namespaceDefinition.getPrefix());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode()
|
||||
{
|
||||
return Objects.hash(id, title, description, parentId, properties, isContainer, isArchive, includedInSupertypeQuery, mandatoryAspects, associations, model);
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((this.id == null) ? 0 : this.id.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -29,19 +29,21 @@ package org.alfresco.rest.api.model;
|
||||
import org.alfresco.service.cmr.dictionary.AspectDefinition;
|
||||
import org.alfresco.service.cmr.i18n.MessageLookup;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class Aspect extends AbstractClass
|
||||
{
|
||||
public Aspect()
|
||||
{
|
||||
}
|
||||
|
||||
public Aspect(AspectDefinition aspectDefinition, MessageLookup messageLookup)
|
||||
public Aspect(AspectDefinition aspectDefinition, MessageLookup messageLookup, List<PropertyDefinition> properties)
|
||||
{
|
||||
this.id = aspectDefinition.getName().toPrefixString();
|
||||
this.title = aspectDefinition.getTitle(messageLookup);
|
||||
this.description = aspectDefinition.getDescription(messageLookup);
|
||||
this.parentId = getParentNameAsString(aspectDefinition.getParentName());
|
||||
this.model = getModelInfo(aspectDefinition, messageLookup);
|
||||
this.properties = setList(properties);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -53,12 +55,6 @@ public class Aspect extends AbstractClass
|
||||
.append(", description=").append(this.description)
|
||||
.append(", parentId=").append(parentId)
|
||||
.append(", properties=").append(properties)
|
||||
.append(", mandatoryAspects=").append(mandatoryAspects)
|
||||
.append(", isContainer=").append(isContainer)
|
||||
.append(", isArchive=").append(isArchive)
|
||||
.append(", associations=").append(associations)
|
||||
.append(", model=").append(model)
|
||||
.append(", includedInSupertypeQuery=").append(includedInSupertypeQuery)
|
||||
.append(']');
|
||||
return builder.toString();
|
||||
}
|
||||
|
@@ -1,158 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class Association
|
||||
{
|
||||
private String id;
|
||||
private String title;
|
||||
private String description;
|
||||
private Boolean isChild;
|
||||
private Boolean isProtected;
|
||||
private AssociationSource source = null;
|
||||
private AssociationSource target = null;
|
||||
|
||||
public Association()
|
||||
{
|
||||
}
|
||||
|
||||
public Association(String id, String title, String description, Boolean isChild, Boolean isProtected, AssociationSource source, AssociationSource target)
|
||||
{
|
||||
this.id = id;
|
||||
this.title = title;
|
||||
this.description = description;
|
||||
this.isChild = isChild;
|
||||
this.isProtected = isProtected;
|
||||
this.source = source;
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
public String getId()
|
||||
{
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id)
|
||||
{
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getTitle()
|
||||
{
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(String title)
|
||||
{
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
public String getDescription()
|
||||
{
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description)
|
||||
{
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Boolean getIsChild()
|
||||
{
|
||||
return isChild;
|
||||
}
|
||||
|
||||
public void setIsChild(Boolean isChild)
|
||||
{
|
||||
this.isChild = isChild;
|
||||
}
|
||||
|
||||
public Boolean getIsProtected()
|
||||
{
|
||||
return isProtected;
|
||||
}
|
||||
|
||||
public void setIsProtected(Boolean isProtected)
|
||||
{
|
||||
this.isProtected = isProtected;
|
||||
}
|
||||
|
||||
public AssociationSource getSource()
|
||||
{
|
||||
return source;
|
||||
}
|
||||
|
||||
public void setSource(AssociationSource source)
|
||||
{
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public AssociationSource getTarget()
|
||||
{
|
||||
return target;
|
||||
}
|
||||
|
||||
public void setTarget(AssociationSource target)
|
||||
{
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
Association other = (Association) obj;
|
||||
return Objects.equals(id, other.getId()) &&
|
||||
Objects.equals(title, other.getTitle()) &&
|
||||
Objects.equals(description, other.getDescription()) &&
|
||||
Objects.equals(isChild, other.getIsChild()) &&
|
||||
Objects.equals(isProtected, other.getIsProtected()) &&
|
||||
Objects.equals(source, other.getSource()) &&
|
||||
Objects.equals(target, other.getTarget());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder(512);
|
||||
builder.append("Association [id=").append(this.id)
|
||||
.append(", title=").append(this.title)
|
||||
.append(", description=").append(this.description)
|
||||
.append(", isChild=").append(isChild)
|
||||
.append(", isProtected=").append(isProtected)
|
||||
.append(", source=").append(source)
|
||||
.append(", target=").append(target)
|
||||
.append(']');
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
@@ -1,129 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class AssociationSource {
|
||||
private String role = null;
|
||||
private String cls = null;
|
||||
private Boolean isMany = null;
|
||||
private Boolean isMandatory = null;
|
||||
private Boolean isMandatoryEnforced = null;
|
||||
|
||||
public AssociationSource()
|
||||
{
|
||||
}
|
||||
|
||||
public AssociationSource(String role, String cls, Boolean isMany, Boolean isMandatory, Boolean isMandatoryEnforced)
|
||||
{
|
||||
this.role = role;
|
||||
this.cls = cls;
|
||||
this.isMany = isMany;
|
||||
this.isMandatory = isMandatory;
|
||||
this.isMandatoryEnforced = isMandatoryEnforced;
|
||||
}
|
||||
|
||||
public String getRole()
|
||||
{
|
||||
return role;
|
||||
}
|
||||
|
||||
public void setRole(String role)
|
||||
{
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public String getCls()
|
||||
{
|
||||
return cls;
|
||||
}
|
||||
|
||||
public void setCls(String cls)
|
||||
{
|
||||
this.cls = cls;
|
||||
}
|
||||
|
||||
public Boolean getIsMany()
|
||||
{
|
||||
return isMany;
|
||||
}
|
||||
|
||||
public void setIsMany(Boolean isMany)
|
||||
{
|
||||
this.isMany = isMany;
|
||||
}
|
||||
|
||||
public Boolean getIsMandatory()
|
||||
{
|
||||
return isMandatory;
|
||||
}
|
||||
|
||||
public void setIsMandatory(Boolean isMandatory)
|
||||
{
|
||||
this.isMandatory = isMandatory;
|
||||
}
|
||||
|
||||
public Boolean getIsMandatoryEnforced()
|
||||
{
|
||||
return isMandatoryEnforced;
|
||||
}
|
||||
|
||||
public void setIsMandatoryEnforced(Boolean isMandatoryEnforced)
|
||||
{
|
||||
this.isMandatoryEnforced = isMandatoryEnforced;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
AssociationSource other = (AssociationSource) obj;
|
||||
return Objects.equals(role, other.getRole()) &&
|
||||
Objects.equals(cls, other.getCls()) &&
|
||||
Objects.equals(isMany, other.getIsMany()) &&
|
||||
Objects.equals(isMandatory, other.getIsMandatory()) &&
|
||||
Objects.equals(isMandatoryEnforced, other.getIsMandatoryEnforced());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder(512);
|
||||
builder.append("AssociationSource [role=").append(this.role)
|
||||
.append(", cls=").append(this.cls)
|
||||
.append(", isMany=").append(this.isMany)
|
||||
.append(", isMandatory=").append(isMandatory)
|
||||
.append(", isMandatoryEnforced=").append(isMandatoryEnforced)
|
||||
.append(']');
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
@@ -1,105 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
public class Model implements Comparable<Model>
|
||||
{
|
||||
private String id;
|
||||
private String author;
|
||||
private String description;
|
||||
private String namespaceUri;
|
||||
private String namespacePrefix;
|
||||
|
||||
public Model()
|
||||
{
|
||||
}
|
||||
|
||||
public Model(String name, String author, String description, String namespaceUri, String namespacePrefix)
|
||||
{
|
||||
this.id = name;
|
||||
this.author = author;
|
||||
this.description = description;
|
||||
this.namespaceUri = namespaceUri;
|
||||
this.namespacePrefix = namespacePrefix;
|
||||
}
|
||||
|
||||
public String getId()
|
||||
{
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id)
|
||||
{
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getAuthor()
|
||||
{
|
||||
return author;
|
||||
}
|
||||
|
||||
public void setAuthor(String author)
|
||||
{
|
||||
this.author = author;
|
||||
}
|
||||
|
||||
public String getDescription()
|
||||
{
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description)
|
||||
{
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getNamespaceUri()
|
||||
{
|
||||
return namespaceUri;
|
||||
}
|
||||
|
||||
public void setNamespaceUri(String namespaceUri)
|
||||
{
|
||||
this.namespaceUri = namespaceUri;
|
||||
}
|
||||
|
||||
public String getNamespacePrefix()
|
||||
{
|
||||
return namespacePrefix;
|
||||
}
|
||||
|
||||
public void setNamespacePrefix(String namespacePrefix)
|
||||
{
|
||||
this.namespacePrefix = namespacePrefix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Model model)
|
||||
{
|
||||
return this.id.compareTo(model.getId());
|
||||
}
|
||||
}
|
@@ -29,19 +29,21 @@ package org.alfresco.rest.api.model;
|
||||
import org.alfresco.service.cmr.dictionary.TypeDefinition;
|
||||
import org.alfresco.service.cmr.i18n.MessageLookup;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class Type extends AbstractClass
|
||||
{
|
||||
public Type()
|
||||
{
|
||||
}
|
||||
|
||||
public Type(TypeDefinition typeDefinition, MessageLookup messageLookup)
|
||||
public Type(TypeDefinition typeDefinition, MessageLookup messageLookup, List<PropertyDefinition> properties)
|
||||
{
|
||||
this.id = typeDefinition.getName().toPrefixString();
|
||||
this.title = typeDefinition.getTitle(messageLookup);
|
||||
this.description = typeDefinition.getDescription(messageLookup);
|
||||
this.parentId = getParentNameAsString(typeDefinition.getParentName());
|
||||
this.model = getModelInfo(typeDefinition, messageLookup);
|
||||
this.properties = setList(properties);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -53,12 +55,6 @@ public class Type extends AbstractClass
|
||||
.append(", description=").append(this.description)
|
||||
.append(", parentId=").append(parentId)
|
||||
.append(", properties=").append(properties)
|
||||
.append(", mandatoryAspects=").append(mandatoryAspects)
|
||||
.append(", isContainer=").append(isContainer)
|
||||
.append(", isArchive=").append(isArchive)
|
||||
.append(", associations=").append(associations)
|
||||
.append(", model=").append(model)
|
||||
.append(", includedInSupertypeQuery=").append(includedInSupertypeQuery)
|
||||
.append(']');
|
||||
return builder.toString();
|
||||
}
|
||||
|
@@ -25,6 +25,12 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.search;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.repo.search.impl.querymodel.impl.db.DBStats;
|
||||
import org.alfresco.repo.search.impl.querymodel.impl.db.SingleTaskRestartableWatch;
|
||||
import org.alfresco.rest.api.model.Node;
|
||||
import org.alfresco.rest.api.search.context.SearchRequestContext;
|
||||
import org.alfresco.rest.api.search.impl.ResultMapper;
|
||||
@@ -45,14 +51,14 @@ import org.alfresco.service.cmr.search.SearchParameters;
|
||||
import org.alfresco.service.cmr.search.SearchService;
|
||||
import org.alfresco.util.ParameterCheck;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.extensions.webscripts.AbstractWebScript;
|
||||
import org.springframework.extensions.webscripts.WebScriptRequest;
|
||||
import org.springframework.extensions.webscripts.WebScriptResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import org.springframework.util.StopWatch;
|
||||
import org.springframework.util.StopWatch.TaskInfo;
|
||||
|
||||
/**
|
||||
* An implementation of the {{baseUrl}}/{{networkId}}/public/search/versions/1/search endpoint
|
||||
@@ -62,12 +68,15 @@ import java.util.List;
|
||||
public class SearchApiWebscript extends AbstractWebScript implements RecognizedParamsExtractor, RequestReader, ResponseWriter,
|
||||
InitializingBean
|
||||
{
|
||||
protected static final Log logger = LogFactory.getLog(SearchApiWebscript.class);
|
||||
|
||||
private ServiceRegistry serviceRegistry;
|
||||
private SearchService searchService;
|
||||
private SearchMapper searchMapper;
|
||||
private ResultMapper resultMapper;
|
||||
protected ApiAssistant assistant;
|
||||
protected ResourceWebScriptHelper helper;
|
||||
private boolean statsEnabled;
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet()
|
||||
@@ -82,6 +91,7 @@ public class SearchApiWebscript extends AbstractWebScript implements RecognizedP
|
||||
@Override
|
||||
public void execute(WebScriptRequest webScriptRequest, WebScriptResponse webScriptResponse) throws IOException
|
||||
{
|
||||
StopWatch apiStopWatch = new StopWatch();
|
||||
try {
|
||||
//Turn JSON into a Java object respresentation
|
||||
SearchQuery searchQuery = extractJsonContent(webScriptRequest, assistant.getJsonHelper(), SearchQuery.class);
|
||||
@@ -96,12 +106,43 @@ public class SearchApiWebscript extends AbstractWebScript implements RecognizedP
|
||||
SearchParameters searchParams = searchMapper.toSearchParameters(params, searchQuery, searchRequestContext);
|
||||
|
||||
//Call searchService
|
||||
apiStopWatch.start("nodes");
|
||||
ResultSet results = searchService.query(searchParams);
|
||||
apiStopWatch.stop();
|
||||
|
||||
//Turn solr results into JSON
|
||||
apiStopWatch.start("props");
|
||||
CollectionWithPagingInfo<Node> resultJson = resultMapper.toCollectionWithPagingInfo(params, searchRequestContext, searchQuery, results);
|
||||
|
||||
//Post-process the request and pass in params, eg. params.getFilter()
|
||||
Object toRender = helper.processAdditionsToTheResponse(null, null, null, params, resultJson);
|
||||
apiStopWatch.stop();
|
||||
|
||||
// store execution stats in a special header if enabled
|
||||
if (statsEnabled)
|
||||
{
|
||||
// store execution time in a special header
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
sb.append("api={");
|
||||
sb.append("tot=").append(apiStopWatch.getTotalTimeMillis()).append("ms,");
|
||||
addStopWatchStats(sb, apiStopWatch);
|
||||
sb.append("}; ");
|
||||
|
||||
sb.append("db={");
|
||||
addStopWatchStats(sb, DBStats.queryStopWatch());
|
||||
sb.append("}; ");
|
||||
|
||||
sb.append("query={");
|
||||
addStopWatchStats(sb, DBStats.handlerStopWatch());
|
||||
sb.append(",");
|
||||
addStopWatchStats(sb, DBStats.aclReadStopWatch());
|
||||
sb.append(",");
|
||||
addStopWatchStats(sb, DBStats.aclOwnerStopWatch());
|
||||
sb.append("}");
|
||||
|
||||
webScriptResponse.addHeader("X-Response-Stats", sb.toString());
|
||||
}
|
||||
|
||||
//Write response
|
||||
setResponse(webScriptResponse, DEFAULT_SUCCESS);
|
||||
@@ -112,6 +153,44 @@ public class SearchApiWebscript extends AbstractWebScript implements RecognizedP
|
||||
}
|
||||
}
|
||||
|
||||
private void addStopWatchStats(StringBuilder sb, StopWatch watch)
|
||||
{
|
||||
boolean first = true;
|
||||
|
||||
for (TaskInfo task : watch.getTaskInfo())
|
||||
{
|
||||
if (first)
|
||||
{
|
||||
first = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
sb.append(",");
|
||||
}
|
||||
|
||||
sb.append(task.getTaskName())
|
||||
.append("=")
|
||||
.append(task.getTimeMillis())
|
||||
.append("ms");
|
||||
|
||||
int pc = Math.round(100 * task.getTimeNanos() / watch.getTotalTimeNanos());
|
||||
sb.append("(")
|
||||
.append(pc).append("%")
|
||||
.append(")");
|
||||
}
|
||||
}
|
||||
|
||||
private void addStopWatchStats(StringBuilder sb, SingleTaskRestartableWatch watch)
|
||||
{
|
||||
long decimillis = (watch.getTotalTimeMicros()+5)/100;
|
||||
double millis = decimillis/10.0;
|
||||
|
||||
sb.append(watch.getName())
|
||||
.append("=")
|
||||
.append(millis)
|
||||
.append("ms");
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the Params object, parameters come from the SearchQuery json not the request
|
||||
* @param webScriptRequest
|
||||
@@ -164,4 +243,10 @@ public class SearchApiWebscript extends AbstractWebScript implements RecognizedP
|
||||
{
|
||||
this.helper = helper;
|
||||
}
|
||||
|
||||
// receiving as a string because of known issue: https://jira.spring.io/browse/SPR-9989
|
||||
public void setStatsEnabled(String enabled) {
|
||||
this.statsEnabled = Boolean.valueOf(enabled);
|
||||
logger.info("API stats header: " + (this.statsEnabled ? "enabled" : "disabled"));
|
||||
}
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -26,8 +26,6 @@
|
||||
|
||||
package org.alfresco.rest.api.search.impl;
|
||||
|
||||
import static java.util.Optional.empty;
|
||||
import static java.util.Optional.of;
|
||||
import static org.alfresco.rest.api.search.impl.StoreMapper.DELETED;
|
||||
import static org.alfresco.rest.api.search.impl.StoreMapper.HISTORY;
|
||||
import static org.alfresco.rest.api.search.impl.StoreMapper.LIVE_NODES;
|
||||
@@ -44,10 +42,9 @@ import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.alfresco.repo.search.SearchEngineResultSet;
|
||||
import org.alfresco.repo.search.impl.solr.SolrJSONResultSet;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericBucket;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericFacetResponse;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericFacetResponse.FACET_TYPE;
|
||||
@@ -156,10 +153,12 @@ public class ResultMapper
|
||||
*/
|
||||
public CollectionWithPagingInfo<Node> toCollectionWithPagingInfo(Params params, SearchRequestContext searchRequestContext, SearchQuery searchQuery, ResultSet results)
|
||||
{
|
||||
List<Node> noderesults = new ArrayList<>();
|
||||
SearchContext context = null;
|
||||
Integer total = null;
|
||||
List<Node> noderesults = new ArrayList<Node>();
|
||||
Map<String, UserInfo> mapUserInfo = new HashMap<>(10);
|
||||
Map<NodeRef, List<Pair<String, List<String>>>> highLighting = results.getHighlighting();
|
||||
final AtomicInteger unknownNodeRefsCount = new AtomicInteger();
|
||||
Map<NodeRef, List<Pair<String, List<String>>>> hightLighting = results.getHighlighting();
|
||||
int notFound = 0;
|
||||
boolean isHistory = searchRequestContext.getStores().contains(StoreMapper.HISTORY);
|
||||
|
||||
for (ResultSetRow row:results)
|
||||
@@ -170,7 +169,7 @@ public class ResultMapper
|
||||
{
|
||||
float f = row.getScore();
|
||||
List<HighlightEntry> highlightEntries = null;
|
||||
List<Pair<String, List<String>>> high = highLighting.get(row.getNodeRef());
|
||||
List<Pair<String, List<String>>> high = hightLighting.get(row.getNodeRef());
|
||||
|
||||
if (high != null && !high.isEmpty())
|
||||
{
|
||||
@@ -186,21 +185,26 @@ public class ResultMapper
|
||||
else
|
||||
{
|
||||
logger.debug("Unknown noderef returned from search results "+row.getNodeRef());
|
||||
unknownNodeRefsCount.incrementAndGet();
|
||||
notFound++;
|
||||
}
|
||||
}
|
||||
|
||||
SearchContext context =
|
||||
toSearchEngineResultSet(results)
|
||||
.map(resultSet -> toSearchContext(resultSet, searchRequestContext, searchQuery))
|
||||
.orElse(null);
|
||||
SolrJSONResultSet solrResultSet = findSolrResultSet(results);
|
||||
|
||||
return CollectionWithPagingInfo.asPaged(params.getPaging(), noderesults, results.hasMore(), setTotal(results), null, context);
|
||||
if (solrResultSet != null)
|
||||
{
|
||||
//We used Solr for this query
|
||||
context = toSearchContext(solrResultSet, searchRequestContext, searchQuery, notFound);
|
||||
}
|
||||
|
||||
total = setTotal(results);
|
||||
|
||||
return CollectionWithPagingInfo.asPaged(params.getPaging(), noderesults, results.hasMore(), total, null, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a node representation based on a ResultSetRow;
|
||||
*
|
||||
* @param searchRequestContext
|
||||
* @param aRow
|
||||
* @param params
|
||||
* @param mapUserInfo
|
||||
@@ -281,14 +285,14 @@ public class ResultMapper
|
||||
|
||||
/**
|
||||
* Uses the results from Solr to set the Search Context
|
||||
*
|
||||
* @param SolrJSONResultSet
|
||||
* @param searchQuery
|
||||
* @return SearchContext
|
||||
*/
|
||||
public SearchContext toSearchContext(SearchEngineResultSet resultSet, SearchRequestContext searchRequestContext, SearchQuery searchQuery)
|
||||
public SearchContext toSearchContext(SolrJSONResultSet solrResultSet, SearchRequestContext searchRequestContext, SearchQuery searchQuery, int notFound)
|
||||
{
|
||||
SearchContext context = null;
|
||||
Map<String, Integer> facetQueries = resultSet.getFacetQueries();
|
||||
Map<String, Integer> facetQueries = solrResultSet.getFacetQueries();
|
||||
List<GenericFacetResponse> facets = new ArrayList<>();
|
||||
List<FacetQueryContext> facetResults = null;
|
||||
SpellCheckContext spellCheckContext = null;
|
||||
@@ -326,7 +330,7 @@ public class ResultMapper
|
||||
}
|
||||
|
||||
//Field Facets
|
||||
Map<String, List<Pair<String, Integer>>> facetFields = resultSet.getFieldFacets();
|
||||
Map<String, List<Pair<String, Integer>>> facetFields = solrResultSet.getFieldFacets();
|
||||
if(FacetFormat.V2 == searchQuery.getFacetFormat())
|
||||
{
|
||||
facets.addAll(getFacetBucketsForFacetFieldsAsFacets(facetFields, searchQuery));
|
||||
@@ -336,29 +340,28 @@ public class ResultMapper
|
||||
ffcs.addAll(getFacetBucketsForFacetFields(facetFields, searchQuery));
|
||||
}
|
||||
|
||||
Map<String, List<Pair<String, Integer>>> facetInterval = resultSet.getFacetIntervals();
|
||||
Map<String, List<Pair<String, Integer>>> facetInterval = solrResultSet.getFacetIntervals();
|
||||
facets.addAll(getGenericFacetsForIntervals(facetInterval, searchQuery));
|
||||
|
||||
Map<String,List<Map<String,String>>> facetRanges = resultSet.getFacetRanges();
|
||||
Map<String,List<Map<String,String>>> facetRanges = solrResultSet.getFacetRanges();
|
||||
facets.addAll(RangeResultMapper.getGenericFacetsForRanges(facetRanges, searchQuery.getFacetRanges()));
|
||||
|
||||
List<GenericFacetResponse> stats = getFieldStats(searchRequestContext, resultSet.getStats());
|
||||
List<GenericFacetResponse> pimped = getPivots(searchRequestContext, resultSet.getPivotFacets(), stats);
|
||||
List<GenericFacetResponse> stats = getFieldStats(searchRequestContext, solrResultSet.getStats());
|
||||
List<GenericFacetResponse> pimped = getPivots(searchRequestContext, solrResultSet.getPivotFacets(), stats);
|
||||
facets.addAll(pimped);
|
||||
facets.addAll(stats);
|
||||
|
||||
//Spelling
|
||||
SpellCheckResult spell = resultSet.getSpellCheckResult();
|
||||
SpellCheckResult spell = solrResultSet.getSpellCheckResult();
|
||||
if (spell != null && spell.getResultName() != null && !spell.getResults().isEmpty())
|
||||
{
|
||||
spellCheckContext = new SpellCheckContext(spell.getResultName(),spell.getResults());
|
||||
}
|
||||
|
||||
//Put it all together
|
||||
context = new SearchContext(resultSet.getLastIndexedTxId(), facets, facetResults, ffcs, spellCheckContext, searchRequestContext.includeRequest()?searchQuery:null);
|
||||
context = new SearchContext(solrResultSet.getLastIndexedTxId(), facets, facetResults, ffcs, spellCheckContext, searchRequestContext.includeRequest()?searchQuery:null);
|
||||
return isNullContext(context)?null:context;
|
||||
}
|
||||
|
||||
public static boolean hasGroup(SearchQuery searchQuery)
|
||||
{
|
||||
if(searchQuery != null && searchQuery.getFacetQueries() != null)
|
||||
@@ -615,32 +618,26 @@ public class ResultMapper
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to see if the input {@link ResultSet} or one of the wrapped {@link ResultSet}
|
||||
* is an instance of {@link SearchEngineResultSet}.
|
||||
* Since some concrete ResultSet implements the decorator patterns, the code
|
||||
* assumes (in those cases) a nested structure with a maximum of 3 levels.
|
||||
* Probably the code could be generalised better in order to scan a decorator
|
||||
* chain with an unlimited depth, but that would require a change in the ResultSet interface.
|
||||
* Gets SolrJSONResultSet class if there is one.
|
||||
* @param results
|
||||
* @return
|
||||
*/
|
||||
protected Optional<SearchEngineResultSet> toSearchEngineResultSet(ResultSet results)
|
||||
protected SolrJSONResultSet findSolrResultSet(ResultSet results)
|
||||
{
|
||||
ResultSet theResultSet = results;
|
||||
|
||||
if (results instanceof FilteringResultSet)
|
||||
{
|
||||
// 1st level
|
||||
results = ((FilteringResultSet) results).getUnFilteredResultSet();
|
||||
|
||||
// 2nd level
|
||||
if (results instanceof FilteringResultSet)
|
||||
{
|
||||
results = ((FilteringResultSet) results).getUnFilteredResultSet();
|
||||
}
|
||||
theResultSet = ((FilteringResultSet) results).getUnFilteredResultSet();
|
||||
}
|
||||
|
||||
return results instanceof SearchEngineResultSet
|
||||
? of(results).map(SearchEngineResultSet.class::cast)
|
||||
: empty();
|
||||
}
|
||||
if (theResultSet instanceof SolrJSONResultSet)
|
||||
{
|
||||
return (SolrJSONResultSet) theResultSet;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
public CollectionWithPagingInfo<TupleList> toCollectionWithPagingInfo(JSONArray docs, SearchSQLQuery searchQuery) throws JSONException
|
||||
{
|
||||
if(docs == null )
|
||||
|
@@ -41,11 +41,7 @@ public class DefaultExceptionResolver implements ExceptionResolver<Exception>
|
||||
@Override
|
||||
public ErrorResponse resolveException(Exception ex)
|
||||
{
|
||||
return new ErrorResponse(DEFAULT_MESSAGE_ID,
|
||||
HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
|
||||
ex.getLocalizedMessage(),
|
||||
ex.getStackTrace(),
|
||||
null);
|
||||
return new ErrorResponse(DEFAULT_MESSAGE_ID, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getLocalizedMessage(), ex.getStackTrace(), null);
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -26,12 +26,10 @@
|
||||
package org.alfresco.rest.framework.tools;
|
||||
|
||||
import org.alfresco.metrics.rest.RestMetricsReporter;
|
||||
import org.alfresco.repo.search.QueryParserException;
|
||||
import org.alfresco.rest.framework.Api;
|
||||
import org.alfresco.rest.framework.core.exceptions.DefaultExceptionResolver;
|
||||
import org.alfresco.rest.framework.core.exceptions.ErrorResponse;
|
||||
import org.alfresco.rest.framework.core.exceptions.ExceptionResolver;
|
||||
import org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver;
|
||||
import org.alfresco.rest.framework.jacksonextensions.JacksonHelper;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
@@ -50,7 +48,6 @@ public class ApiAssistant {
|
||||
|
||||
private ExceptionResolver<Exception> defaultResolver = new DefaultExceptionResolver();
|
||||
private ExceptionResolver<WebScriptException> webScriptExceptionResolver;
|
||||
private ExceptionResolver<QueryParserException> queryParserExceptionResolver;
|
||||
private ExceptionResolver<Exception> resolver;
|
||||
private JacksonHelper jsonHelper;
|
||||
private RestMetricsReporter restMetricsReporter;
|
||||
@@ -80,10 +77,6 @@ public class ApiAssistant {
|
||||
{
|
||||
error = webScriptExceptionResolver.resolveException((WebScriptException) ex);
|
||||
}
|
||||
else if (ex instanceof QueryParserException)
|
||||
{
|
||||
error = queryParserExceptionResolver.resolveException((QueryParserException) ex);
|
||||
}
|
||||
else
|
||||
{
|
||||
error = resolver.resolveException(ex);
|
||||
@@ -107,11 +100,6 @@ public class ApiAssistant {
|
||||
this.webScriptExceptionResolver = webScriptExceptionResolver;
|
||||
}
|
||||
|
||||
public void setQueryParserExceptionResolver(ExceptionResolver<QueryParserException> queryParserExceptionResolver)
|
||||
{
|
||||
this.queryParserExceptionResolver = queryParserExceptionResolver;
|
||||
}
|
||||
|
||||
public void setResolver(ExceptionResolver<Exception> resolver) {
|
||||
this.resolver = resolver;
|
||||
}
|
||||
|
@@ -31,7 +31,6 @@ import java.util.Properties;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
|
||||
import org.alfresco.httpclient.HttpClientFactory.SecureCommsType;
|
||||
import org.alfresco.web.scripts.servlet.X509ServletFilterBase;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
@@ -71,9 +70,7 @@ public class AlfrescoX509ServletFilter extends X509ServletFilterBase
|
||||
* Return true or false based on the property. This will switch on/off X509 enforcement in the X509ServletFilterBase.
|
||||
*/
|
||||
|
||||
if (prop == null ||
|
||||
SecureCommsType.getType(prop) == SecureCommsType.NONE ||
|
||||
SecureCommsType.getType(prop) == SecureCommsType.SECRET)
|
||||
if (prop == null || "none".equals(prop))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@@ -23,4 +23,7 @@
|
||||
# See issue REPO-2575 for details.
|
||||
alfresco.restApi.basicAuthScheme=false
|
||||
# REPO-4388 allow CORS headers in transaction response
|
||||
webscripts.transaction.preserveHeadersPattern=Access-Control-.*
|
||||
webscripts.transaction.preserveHeadersPattern=Access-Control-.*
|
||||
|
||||
# REPO-5371 enable stats header in API response (only search atm)
|
||||
webscripts.stats.enabled=false
|
@@ -134,8 +134,6 @@
|
||||
</bean>
|
||||
<bean id="webScriptExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.WebScriptExceptionResolver">
|
||||
</bean>
|
||||
<bean id="queryParserExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver">
|
||||
</bean>
|
||||
<bean id="simpleMappingExceptionResolverParent" abstract="true" class="org.alfresco.rest.framework.core.exceptions.SimpleMappingExceptionResolver">
|
||||
<property name="exceptionMappings">
|
||||
<map>
|
||||
@@ -184,7 +182,6 @@
|
||||
<property name="resolver" ref="simpleMappingExceptionResolver" />
|
||||
<property name="webScriptExceptionResolver" ref="webScriptExceptionResolver" />
|
||||
<property name="restMetricsReporter" ref="restMetricsReporter"/>
|
||||
<property name="queryParserExceptionResolver" ref="queryParserExceptionResolver" />
|
||||
</bean>
|
||||
|
||||
<!-- Using annotation-config=false means AutowiredAnnotationBeanPostProcessor
|
||||
@@ -1027,6 +1024,7 @@
|
||||
<property name="helper" ref="webscriptHelper" />
|
||||
<property name="resultMapper" ref="searchapiResultMapper" />
|
||||
<property name="searchMapper" ref="searchapiSearchMapper" />
|
||||
<property name="statsEnabled" value="${webscripts.stats.enabled}" />
|
||||
</bean>
|
||||
|
||||
<bean id="webscript.org.alfresco.api.SearchSQLApiWebscript.post"
|
||||
@@ -1083,7 +1081,6 @@
|
||||
<property name="authorityService" ref="AuthorityService" />
|
||||
<property name="tenantAdminService" ref="tenantAdminService"/>
|
||||
<property name="networksService" ref="networksService"/>
|
||||
<property name="cmisCreateDocRequestRenditionsSet" value="${cmis.create.doc.request.renditions.set}" />
|
||||
</bean>
|
||||
|
||||
<bean id="cmisDispatcherRegistry" class="org.alfresco.opencmis.CMISDispatcherRegistryImpl">
|
||||
|
@@ -3,7 +3,7 @@ communitysummary.system-information=Informaci\u00f3n del sistema
|
||||
communitysummary.system-information.free-memory=Memoria libre (GB)
|
||||
communitysummary.system-information.maximum-memory=Memoria m\u00e1xima (GB)
|
||||
communitysummary.system-information.total-memory=Memoria total (GB)
|
||||
communitysummary.system-information.cpus=CPUs
|
||||
communitysummary.system-information.cpus=UPCs
|
||||
|
||||
communitysummary.system-information.java-home=Inicio de Java
|
||||
communitysummary.system-information.java-version=Versi\u00f3n de Java
|
||||
|
@@ -1,76 +1,34 @@
|
||||
<#assign null><span style="color:red">${msg("nodebrowser.null")?html}</span></#assign>
|
||||
<#assign none><span style="color:red">${msg("nodebrowser.none")?html}</span></#assign>
|
||||
<#assign collection>${msg("nodebrowser.collection")?html}</#assign>
|
||||
<#assign maxDepth=1000 />
|
||||
|
||||
<#macro dateFormat date>${date?string("dd MMM yyyy HH:mm:ss 'GMT'Z '('zzz')'")}</#macro>
|
||||
<#macro propValue p>
|
||||
<#attempt>
|
||||
<#if p.value??>
|
||||
<#if p.value?is_date>
|
||||
<@dateFormat p.value />
|
||||
<#elseif p.value?is_boolean>
|
||||
${p.value?string}
|
||||
<#elseif p.value?is_number>
|
||||
${p.value?c}
|
||||
<#elseif p.value?is_string>
|
||||
${p.value?html}
|
||||
<#elseif p.value?is_hash || p.value?is_enumerable>
|
||||
<@convertToJSON p.value />
|
||||
</#if>
|
||||
<#else>
|
||||
${null}
|
||||
</#if>
|
||||
<#recover>
|
||||
<span style="color:red">${.error}</span>
|
||||
</#attempt>
|
||||
</#macro>
|
||||
<#macro convertToJSON v>
|
||||
<#if v??>
|
||||
<#if v?is_date>
|
||||
<@dateFormat v />
|
||||
<#elseif v?is_boolean>
|
||||
${v?string}
|
||||
<#elseif v?is_number>
|
||||
${v?c}
|
||||
<#elseif v?is_string>
|
||||
"${v?string}"
|
||||
<#elseif v?is_hash>
|
||||
<#if v?keys?size gt maxDepth >
|
||||
<#stop "Max depth of object achieved">
|
||||
<#if p.value??>
|
||||
<#if p.value?is_date>
|
||||
<@dateFormat p.value />
|
||||
<#elseif p.value?is_boolean>
|
||||
${p.value?string}
|
||||
<#elseif p.value?is_number>
|
||||
${p.value?c}
|
||||
<#elseif p.value?is_string>
|
||||
${p.value?html}
|
||||
<#elseif p.value?is_hash>
|
||||
<#assign result = "{"/>
|
||||
<#assign first = true />
|
||||
<#list p.value?keys as key>
|
||||
<#if first = false>
|
||||
<#assign result = result + ", "/>
|
||||
</#if>
|
||||
<@compress single_line=true>
|
||||
{
|
||||
<#assign first = true />
|
||||
<#list v?keys as key>
|
||||
<#if first = false>,</#if>
|
||||
"${key}":
|
||||
<#if v[key]??>
|
||||
<@convertToJSON v[key] />
|
||||
<#else>
|
||||
${null}
|
||||
</#if>
|
||||
<#assign first = false/>
|
||||
</#list>
|
||||
}
|
||||
</@compress>
|
||||
<#elseif v?is_enumerable>
|
||||
<#if v?size gt maxDepth>
|
||||
<#stop "Max depth of object achieved" >
|
||||
</#if>
|
||||
<#assign first = true />
|
||||
<@compress single_line=true>
|
||||
[
|
||||
<#list v as item>
|
||||
<#if first = false>,</#if>
|
||||
<@convertToJSON item />
|
||||
<#assign first = false/>
|
||||
</#list>
|
||||
]
|
||||
</@compress>
|
||||
</#if>
|
||||
<#else>
|
||||
${null}
|
||||
<#assign result = result + "${key}=${p.value[key]?html}" />
|
||||
<#assign first = false/>
|
||||
</#list>
|
||||
<#assign result = result + "}"/>
|
||||
${result}
|
||||
</#if>
|
||||
<#else>
|
||||
${null}
|
||||
</#if>
|
||||
</#macro>
|
||||
<#macro contentUrl nodeRef prop>
|
||||
${url.serviceContext}/api/node/${nodeRef?replace("://","/")}/content;${prop?url}
|
||||
|
@@ -66,8 +66,6 @@
|
||||
|
||||
<bean id="SOLRAuthenticationFilter" class="org.alfresco.repo.web.scripts.solr.SOLRAuthenticationFilter">
|
||||
<property name="secureComms" value="${solr.secureComms}"/>
|
||||
<property name="sharedSecret" value="${solr.sharedSecret}"/>
|
||||
<property name="sharedSecretHeader" value="${solr.sharedSecret.header}"/>
|
||||
</bean>
|
||||
|
||||
<bean id="WebscriptAuthenticationFilter" class="org.alfresco.repo.management.subsystems.ChainingSubsystemProxyFactory">
|
||||
|
@@ -39,7 +39,6 @@ import org.junit.runners.Suite;
|
||||
org.alfresco.repo.web.scripts.workflow.WorkflowModelBuilderTest.class,
|
||||
org.alfresco.repo.web.scripts.solr.StatsGetTest.class,
|
||||
org.alfresco.repo.web.scripts.solr.SOLRSerializerTest.class,
|
||||
org.alfresco.repo.web.scripts.solr.SOLRAuthenticationFilterTest.class,
|
||||
org.alfresco.repo.web.util.PagingCursorTest.class,
|
||||
org.alfresco.repo.web.util.paging.PagingTest.class,
|
||||
org.alfresco.repo.webdav.GetMethodTest.class,
|
||||
|
@@ -1,176 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.web.scripts.solr;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.junit.Test;
|
||||
import org.mockito.Mockito;
|
||||
import org.springframework.mock.web.MockHttpServletRequest;
|
||||
import org.springframework.mock.web.MockHttpServletResponse;
|
||||
|
||||
import javax.servlet.FilterChain;
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.ServletRequest;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class SOLRAuthenticationFilterTest
|
||||
{
|
||||
@Test(expected = AlfrescoRuntimeException.class)
|
||||
public void testSharedSecretNotConfigured() throws Exception
|
||||
{
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.SECRET.name());
|
||||
filter.afterPropertiesSet();
|
||||
}
|
||||
|
||||
@Test(expected = AlfrescoRuntimeException.class)
|
||||
public void testSharedHeaderNotConfigured() throws Exception
|
||||
{
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.SECRET.name());
|
||||
filter.setSharedSecret("shared-secret");
|
||||
filter.setSharedSecretHeader("");
|
||||
filter.afterPropertiesSet();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHTTPSFilterAndSharedSecretSet() throws Exception
|
||||
{
|
||||
String headerKey = "test-header";
|
||||
String sharedSecret = "shared-secret";
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.HTTPS.name());
|
||||
filter.setSharedSecret(sharedSecret);
|
||||
filter.setSharedSecretHeader(headerKey);
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
Mockito.when(request.getHeader(headerKey)).thenReturn(sharedSecret);
|
||||
Mockito.when(request.isSecure()).thenReturn(true);
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
Mockito.verify(chain, Mockito.times(1)).doFilter(request, response);
|
||||
}
|
||||
|
||||
@Test(expected = AlfrescoRuntimeException.class)
|
||||
public void testHTTPSFilterAndInsecureRequest() throws Exception
|
||||
{
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.HTTPS.name());
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
Mockito.when(request.isSecure()).thenReturn(false);
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNoAuthentication() throws Exception
|
||||
{
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.NONE.name());
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
Mockito.verify(chain, Mockito.times(1)).doFilter(request, response);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSharedSecretFilter() throws Exception
|
||||
{
|
||||
String headerKey = "test-header";
|
||||
String sharedSecret = "shared-secret";
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.SECRET.name());
|
||||
filter.setSharedSecret(sharedSecret);
|
||||
filter.setSharedSecretHeader(headerKey);
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
Mockito.when(request.getHeader(headerKey)).thenReturn(sharedSecret);
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
Mockito.verify(chain, Mockito.times(1)).doFilter(request, response);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSharedSecretDontMatch() throws Exception
|
||||
{
|
||||
String headerKey = "test-header";
|
||||
String sharedSecret = "shared-secret";
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.SECRET.name());
|
||||
filter.setSharedSecret(sharedSecret);
|
||||
filter.setSharedSecretHeader(headerKey);
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
Mockito.when(request.getHeader(headerKey)).thenReturn("wrong-secret");
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
Mockito.verify(chain, Mockito.times(0)).doFilter(request, response);
|
||||
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSharedHeaderNotPresent() throws Exception
|
||||
{
|
||||
String headerKey = "test-header";
|
||||
String sharedSecret = "shared-secret";
|
||||
SOLRAuthenticationFilter filter = new SOLRAuthenticationFilter();
|
||||
filter.setSecureComms(SOLRAuthenticationFilter.SecureCommsType.SECRET.name());
|
||||
filter.setSharedSecret(sharedSecret);
|
||||
filter.setSharedSecretHeader(headerKey);
|
||||
filter.afterPropertiesSet();
|
||||
|
||||
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
|
||||
|
||||
FilterChain chain = Mockito.mock(FilterChain.class);
|
||||
|
||||
filter.doFilter(Mockito.mock(ServletContext.class), request, response, chain);
|
||||
Mockito.verify(chain, Mockito.times(0)).doFilter(request, response);
|
||||
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
|
||||
}
|
||||
}
|
@@ -52,7 +52,6 @@ import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.alfresco.repo.search.EmptyResultSet;
|
||||
import org.alfresco.repo.search.SearchEngineResultSet;
|
||||
import org.alfresco.repo.search.impl.solr.SolrJSONResultSet;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericBucket;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericFacetResponse;
|
||||
@@ -304,7 +303,7 @@ public class ResultMapperTests
|
||||
SearchQuery searchQuery = helper.searchQueryFromJson();
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchParameters searchParams = searchMapper.toSearchParameters(EMPTY_PARAMS, searchQuery, searchRequest);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(6, searchContext.getFacetQueries().size());
|
||||
assertEquals(0,searchContext.getFacetQueries().get(0).getCount());
|
||||
@@ -438,7 +437,7 @@ public class ResultMapperTests
|
||||
SearchQuery searchQuery = helper.searchQueryFromJson();
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchParameters searchParams = searchMapper.toSearchParameters(EMPTY_PARAMS, searchQuery, searchRequest);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
|
||||
//Facet intervals
|
||||
List<GenericFacetResponse> intervalFacets = searchContext.getFacets().stream()
|
||||
@@ -478,7 +477,7 @@ public class ResultMapperTests
|
||||
SearchQuery searchQuery = helper.searchQueryFromJson();
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchParameters searchParams = searchMapper.toSearchParameters(EMPTY_PARAMS, searchQuery, searchRequest);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
|
||||
//Numeric facet range
|
||||
List<GenericFacetResponse> rangeFacets = searchContext.getFacets().stream()
|
||||
@@ -532,7 +531,7 @@ public class ResultMapperTests
|
||||
SearchQuery searchQuery = helper.extractFromJson(updatedJSON);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchParameters searchParams = searchMapper.toSearchParameters(EMPTY_PARAMS, searchQuery, searchRequest);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
|
||||
//Numeric facet range
|
||||
List<GenericFacetResponse> rangeFacets = searchContext.getFacets().stream()
|
||||
@@ -576,7 +575,7 @@ public class ResultMapperTests
|
||||
ResultSet results = mockResultset(expectedResponse);
|
||||
SearchQuery searchQuery = helper.extractFromJson(jsonQuery);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(null, searchContext.getFacetQueries());
|
||||
assertEquals(1, searchContext.getFacets().size());
|
||||
@@ -611,7 +610,7 @@ public class ResultMapperTests
|
||||
ResultSet results = mockResultset(expectedResponse);
|
||||
SearchQuery searchQuery = helper.extractFromJson(jsonQuery);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(null, searchContext.getFacetQueries());
|
||||
assertEquals(2, searchContext.getFacets().size());
|
||||
@@ -649,7 +648,7 @@ public class ResultMapperTests
|
||||
ResultSet results = mockResultset(expectedResponse);
|
||||
SearchQuery searchQuery = helper.extractFromJson(jsonQuery);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertTrue(searchContext.getFacets().isEmpty());
|
||||
assertEquals(3,searchContext.getFacetQueries().size());
|
||||
@@ -723,7 +722,7 @@ public class ResultMapperTests
|
||||
ResultSet results = mockResultset(expectedResponse);
|
||||
SearchQuery searchQuery = helper.extractFromJson(jsonQuery);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(null, searchContext.getFacetQueries());
|
||||
assertEquals(1, searchContext.getFacets().size());
|
||||
@@ -739,7 +738,7 @@ public class ResultMapperTests
|
||||
searchQuery = helper.extractFromJson(jsonQuery);
|
||||
results = mockResultset(expectedResponse);
|
||||
searchRequest = SearchRequestContext.from(searchQuery);
|
||||
searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(3,searchContext.getFacetQueries().size());
|
||||
assertEquals("small",searchContext.getFacetQueries().get(0).getLabel());
|
||||
@@ -760,7 +759,7 @@ public class ResultMapperTests
|
||||
+ "\"processedDenies\":true, \"lastIndexedTx\":34}";
|
||||
results = mockResultset(expectedResponse);
|
||||
searchQuery = helper.extractFromJson(jsonQuery);
|
||||
searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertFalse(searchContext.getFacetsFields().isEmpty());
|
||||
assertTrue(searchContext.getFacets().isEmpty());
|
||||
assertEquals("creator",searchContext.getFacetsFields().get(0).getLabel());
|
||||
@@ -771,7 +770,7 @@ public class ResultMapperTests
|
||||
assertEquals("modifier",searchContext.getFacetsFields().get(1).getLabel());
|
||||
jsonQuery = jsonQuery.replace("V1", "V2");
|
||||
searchQuery = helper.extractFromJson(jsonQuery);
|
||||
searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertTrue(searchContext.getFacetsFields().isEmpty());
|
||||
assertFalse(searchContext.getFacets().isEmpty());
|
||||
assertEquals("creator",searchContext.getFacets().get(0).getLabel());
|
||||
@@ -836,7 +835,7 @@ public class ResultMapperTests
|
||||
ResultSet results = mockResultset(expectedResponse);
|
||||
SearchQuery searchQuery = helper.extractFromJson(jsonQuery);
|
||||
SearchRequestContext searchRequest = SearchRequestContext.from(searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SearchEngineResultSet) results, searchRequest, searchQuery);
|
||||
SearchContext searchContext = mapper.toSearchContext((SolrJSONResultSet) results, searchRequest, searchQuery, 0);
|
||||
assertEquals(34l, searchContext.getConsistency().getlastTxId());
|
||||
assertEquals(null, searchContext.getFacetQueries());
|
||||
assertEquals(3, searchContext.getFacets().size());
|
||||
|
@@ -1,289 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.rest.api.tests;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.alfresco.rest.api.model.Association;
|
||||
import org.alfresco.rest.api.model.AssociationSource;
|
||||
import org.alfresco.rest.api.model.Model;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiClient;
|
||||
import org.alfresco.rest.api.tests.client.data.Aspect;
|
||||
import org.alfresco.rest.api.tests.client.data.Type;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Collections;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class BaseModelApiTest extends AbstractBaseApiTest
|
||||
{
|
||||
PublicApiClient.ListResponse<Aspect> aspects = null;
|
||||
Aspect aspect = null, childAspect = null, smartFilterAspect = null,
|
||||
rescanAspect = null, testAspect = null, testAllAspect = null;
|
||||
|
||||
PublicApiClient.ListResponse<Type> types = null;
|
||||
Type type = null, whitePaperType = null, docType = null, publishableType = null, apiBaseType = null,
|
||||
apiFileType = null, apiFileDerivedType = null, apiForcedType = null, apiFileDerivedNoArchiveType = null,
|
||||
apiFolderType = null, apiOverrideType = null, apiOverride2Type = null, apiOverride3Type = null, apiNamedPropConstraintType = null;
|
||||
|
||||
List<Type> allTypes = null;
|
||||
|
||||
PublicApiClient.Paging paging = getPaging(0, 10);
|
||||
Map<String, String> otherParams = new HashMap<>();
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception
|
||||
{
|
||||
super.setup();
|
||||
|
||||
Model myCompanyModel = new Model();
|
||||
myCompanyModel.setAuthor("Administrator");
|
||||
myCompanyModel.setId("mycompany:model");
|
||||
myCompanyModel.setNamespaceUri("http://www.mycompany.com/model/finance/1.0");
|
||||
myCompanyModel.setNamespacePrefix("mycompany");
|
||||
|
||||
Model scanModel = new Model();
|
||||
scanModel.setAuthor("Administrator");
|
||||
scanModel.setId("test:scan");
|
||||
scanModel.setNamespaceUri("http://www.test.com/model/account/1.0");
|
||||
scanModel.setNamespacePrefix("test");
|
||||
|
||||
testAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
testAspect.setId("mycompany:testAspect");
|
||||
testAspect.setTitle("Test Aspect");
|
||||
testAspect.setModel(myCompanyModel);
|
||||
testAspect.setIsContainer(false);
|
||||
testAspect.setIncludedInSupertypeQuery(true);
|
||||
testAspect.setIsArchive(true);
|
||||
|
||||
childAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
childAspect.setId("mycompany:childAspect");
|
||||
childAspect.setTitle("Child Aspect");
|
||||
childAspect.setDescription("Child Aspect Description");
|
||||
childAspect.setParentId("smf:smartFolder");
|
||||
childAspect.setModel(myCompanyModel);
|
||||
childAspect.setIsContainer(false);
|
||||
childAspect.setIncludedInSupertypeQuery(true);
|
||||
|
||||
rescanAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
rescanAspect.setId("test:rescan");
|
||||
rescanAspect.setTitle("rescan");
|
||||
rescanAspect.setDescription("Doc that required to scan ");
|
||||
rescanAspect.setModel(scanModel);
|
||||
rescanAspect.setIsContainer(false);
|
||||
rescanAspect.setIncludedInSupertypeQuery(true);
|
||||
|
||||
smartFilterAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
smartFilterAspect.setId("test:smartFilter");
|
||||
smartFilterAspect.setTitle("Smart filter");
|
||||
smartFilterAspect.setDescription("Smart Filter");
|
||||
smartFilterAspect.setParentId("mycompany:testAspect");
|
||||
smartFilterAspect.setModel(scanModel);
|
||||
smartFilterAspect.setIsContainer(false);
|
||||
smartFilterAspect.setIsArchive(true);
|
||||
smartFilterAspect.setIncludedInSupertypeQuery(true);
|
||||
|
||||
whitePaperType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
whitePaperType.setId("mycompany:whitepaper");
|
||||
whitePaperType.setTitle("whitepaper");
|
||||
whitePaperType.setDescription("Whitepaper");
|
||||
whitePaperType.setParentId("mycompany:doc");
|
||||
whitePaperType.setModel(myCompanyModel);
|
||||
whitePaperType.setIsContainer(false);
|
||||
whitePaperType.setIsArchive(true);
|
||||
whitePaperType.setIncludedInSupertypeQuery(true);
|
||||
|
||||
docType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
docType.setId("mycompany:doc");
|
||||
docType.setTitle("doc");
|
||||
docType.setDescription("Doc");
|
||||
docType.setParentId("cm:content");
|
||||
docType.setModel(myCompanyModel);
|
||||
docType.setIsContainer(false);
|
||||
docType.setIsArchive(true);
|
||||
docType.setIncludedInSupertypeQuery(true);
|
||||
|
||||
publishableType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
publishableType.setId("test:publishable");
|
||||
publishableType.setParentId("mycompany:doc");
|
||||
publishableType.setIsContainer(false);
|
||||
publishableType.setIsArchive(true);
|
||||
publishableType.setIncludedInSupertypeQuery(true);
|
||||
|
||||
Model testModel = new Model();
|
||||
testModel.setAuthor("Administrator");
|
||||
testModel.setId("api:apiModel");
|
||||
testModel.setNamespaceUri("http://www.api.t2/model/1.0");
|
||||
testModel.setNamespacePrefix("test2");
|
||||
|
||||
Model apiModel = new Model();
|
||||
apiModel.setAuthor("Administrator");
|
||||
apiModel.setId("api:apiModel");
|
||||
apiModel.setNamespaceUri("http://www.api.t1/model/1.0");
|
||||
apiModel.setNamespacePrefix("api");
|
||||
|
||||
AssociationSource testAllAspectSource = new AssociationSource(null, "test2:aspect-all", true, true, null);
|
||||
AssociationSource testAllAspectTarget = new AssociationSource(null, "api:referenceable", false, false, false);
|
||||
Association testAllAspectAssociation = new Association("api:assoc-all", null, null, null, false, testAllAspectSource, testAllAspectTarget);
|
||||
testAllAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
testAllAspect.setId("test2:aspect-all");
|
||||
testAllAspect.setTitle("Aspect derived from other namespace");
|
||||
testAllAspect.setIsArchive(false);
|
||||
testAllAspect.setIncludedInSupertypeQuery(false);
|
||||
testAllAspect.setIsContainer(false);
|
||||
testAllAspect.setModel(testModel);
|
||||
testAllAspect.setAssociations(Collections.singletonList(testAllAspectAssociation));
|
||||
testAllAspect.setMandatoryAspects(Arrays.asList("test2:aspect-three", "api:aspect-one", "api:aspect-two"));
|
||||
|
||||
AssociationSource apiBaseSource = new AssociationSource(null, "api:base", false, true, null);
|
||||
AssociationSource apiBaseTarget = new AssociationSource(null, "api:base", true, false, false);
|
||||
Association apiBaseAssociation = new Association("api:assoc1", null, null, false, false, apiBaseSource, apiBaseTarget);
|
||||
|
||||
AssociationSource apiChildSource = new AssociationSource(null, "api:base", true, true, null);
|
||||
AssociationSource apiChildTarget = new AssociationSource(null, "api:referenceable", false, false, false);
|
||||
Association apiChildAssociation = new Association("api:childassoc1", null, null, true, false, apiChildSource, apiChildTarget);
|
||||
|
||||
AssociationSource apiBaseSource2 = new AssociationSource(null, "api:base", true, true, null);
|
||||
AssociationSource apiBaseTarget2 = new AssociationSource(null, "api:referenceable", false, false, false);
|
||||
Association apiBaseAssociation2 = new Association("api:assoc2", null, null, false, false, apiBaseSource2, apiBaseTarget2);
|
||||
|
||||
AssociationSource apiChildPropagateSource = new AssociationSource(null, "api:base", true, true, null);
|
||||
AssociationSource apiChildPropagateTarget = new AssociationSource(null, "api:referenceable", false, false, false);
|
||||
Association apiChildPropagateAssociation = new Association("api:childassocPropagate", null, null, true, false, apiChildPropagateSource, apiChildPropagateTarget);
|
||||
|
||||
apiBaseType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiBaseType.setId("api:base");
|
||||
apiBaseType.setTitle("Base");
|
||||
apiBaseType.setDescription("The Base Type");
|
||||
apiBaseType.setIncludedInSupertypeQuery(true);
|
||||
apiBaseType.setIsContainer(true);
|
||||
apiBaseType.setModel(apiModel);
|
||||
apiBaseType.setAssociations(Arrays.asList(apiBaseAssociation, apiChildAssociation, apiBaseAssociation2, apiChildPropagateAssociation));
|
||||
apiBaseType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
apiForcedType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiForcedType.setId("api:enforced");
|
||||
apiForcedType.setParentId("api:base");
|
||||
apiForcedType.setIncludedInSupertypeQuery(true);
|
||||
apiForcedType.setIsContainer(true);
|
||||
apiForcedType.setModel(apiModel);
|
||||
apiForcedType.setAssociations(Arrays.asList(apiBaseAssociation2, apiChildPropagateAssociation, apiBaseAssociation, apiChildAssociation));
|
||||
apiForcedType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
AssociationSource apiChildSource2 = new AssociationSource(null, "api:file", false, true, null);
|
||||
AssociationSource apiChildTarget2 = new AssociationSource(null, "api:referenceable", true, false, false);
|
||||
Association apiChildAssociation2 = new Association("api:childassoc2", null, null, true, false, apiChildSource2, apiChildTarget2);
|
||||
apiFileType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiFileType.setId("api:file");
|
||||
apiFileType.setParentId("api:base");
|
||||
apiFileType.setIsArchive(true);
|
||||
apiFileType.setIncludedInSupertypeQuery(true);
|
||||
apiFileType.setIsContainer(true);
|
||||
apiFileType.setModel(apiModel);
|
||||
apiFileType.setAssociations(Arrays.asList(apiBaseAssociation2, apiChildAssociation2, apiChildPropagateAssociation, apiBaseAssociation, apiChildAssociation));
|
||||
apiFileType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
apiFileDerivedType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiFileDerivedType.setId("api:file-derived");
|
||||
apiFileDerivedType.setParentId("api:file");
|
||||
apiFileDerivedType.setIsArchive(true);
|
||||
apiFileDerivedType.setIncludedInSupertypeQuery(true);
|
||||
apiFileDerivedType.setIsContainer(true);
|
||||
apiFileDerivedType.setModel(apiModel);
|
||||
apiFileDerivedType.setAssociations(Arrays.asList(apiBaseAssociation2, apiChildAssociation2, apiChildPropagateAssociation, apiBaseAssociation, apiChildAssociation));
|
||||
apiFileDerivedType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
apiFileDerivedNoArchiveType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiFileDerivedNoArchiveType.setId("api:file-derived-no-archive");
|
||||
apiFileDerivedNoArchiveType.setParentId("api:file");
|
||||
apiFileDerivedNoArchiveType.setIsArchive(false);
|
||||
apiFileDerivedNoArchiveType.setIncludedInSupertypeQuery(true);
|
||||
apiFileDerivedNoArchiveType.setIsContainer(true);
|
||||
apiFileDerivedNoArchiveType.setModel(apiModel);
|
||||
apiFileDerivedNoArchiveType.setAssociations(Arrays.asList(apiBaseAssociation2, apiChildAssociation2, apiChildPropagateAssociation, apiBaseAssociation, apiChildAssociation));
|
||||
apiFileDerivedNoArchiveType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
apiFolderType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiFolderType.setId("api:folder");
|
||||
apiFolderType.setParentId("api:base");
|
||||
apiFolderType.setIncludedInSupertypeQuery(true);
|
||||
apiFolderType.setIsContainer(true);
|
||||
apiFolderType.setModel(apiModel);
|
||||
apiFolderType.setAssociations(Arrays.asList(apiBaseAssociation2, apiChildPropagateAssociation, apiBaseAssociation, apiChildAssociation));
|
||||
apiFolderType.setMandatoryAspects(Collections.singletonList("api:referenceable"));
|
||||
|
||||
apiOverrideType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiOverrideType.setId("api:overridetype1");
|
||||
apiOverrideType.setParentId("api:base");
|
||||
apiOverrideType.setIncludedInSupertypeQuery(true);
|
||||
apiOverrideType.setIsContainer(false);
|
||||
apiOverrideType.setModel(apiModel);
|
||||
apiOverrideType.setAssociations(Collections.emptyList());
|
||||
apiOverrideType.setMandatoryAspects(Collections.emptyList());
|
||||
|
||||
apiOverride2Type = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiOverride2Type.setId("api:overridetype2");
|
||||
apiOverride2Type.setParentId("api:overridetype1");
|
||||
apiOverride2Type.setIncludedInSupertypeQuery(true);
|
||||
apiOverride2Type.setIsContainer(false);
|
||||
apiOverride2Type.setModel(apiModel);
|
||||
apiOverride2Type.setAssociations(Collections.emptyList());
|
||||
apiOverride2Type.setMandatoryAspects(Collections.emptyList());
|
||||
|
||||
apiOverride3Type = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiOverride3Type.setId("api:overridetype3");
|
||||
apiOverride3Type.setParentId("api:overridetype2");
|
||||
apiOverride3Type.setIncludedInSupertypeQuery(true);
|
||||
apiOverride3Type.setIsContainer(false);
|
||||
apiOverride3Type.setModel(apiModel);
|
||||
apiOverride3Type.setAssociations(Collections.emptyList());
|
||||
apiOverride3Type.setMandatoryAspects(Collections.emptyList());
|
||||
|
||||
apiNamedPropConstraintType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
apiNamedPropConstraintType.setId("api:typeWithNamedPropConstraint");
|
||||
apiNamedPropConstraintType.setTitle("Type with named property-defined constraint.");
|
||||
apiNamedPropConstraintType.setDescription("A type with a named constraint defined within one of its properties.");
|
||||
apiNamedPropConstraintType.setParentId("api:overridetype2");
|
||||
apiNamedPropConstraintType.setIncludedInSupertypeQuery(true);
|
||||
apiNamedPropConstraintType.setIsContainer(false);
|
||||
apiNamedPropConstraintType.setModel(apiModel);
|
||||
apiNamedPropConstraintType.setAssociations(Collections.emptyList());
|
||||
apiNamedPropConstraintType.setMandatoryAspects(Collections.emptyList());
|
||||
|
||||
allTypes = ImmutableList.of(apiBaseType, apiForcedType, apiFileType, apiFileDerivedType,
|
||||
apiFileDerivedNoArchiveType, apiFolderType, apiOverrideType, apiOverride2Type,
|
||||
apiOverride3Type, apiNamedPropConstraintType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getScope()
|
||||
{
|
||||
return "public";
|
||||
}
|
||||
}
|
@@ -27,22 +27,53 @@
|
||||
package org.alfresco.rest.api.tests;
|
||||
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiClient;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiException;
|
||||
import org.alfresco.rest.api.tests.client.RequestContext;
|
||||
import org.alfresco.rest.api.tests.client.data.Aspect;
|
||||
import org.apache.commons.httpclient.HttpStatus;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
|
||||
public class TestAspects extends BaseModelApiTest
|
||||
public class TestAspects extends AbstractBaseApiTest
|
||||
{
|
||||
|
||||
private PublicApiClient.Paging paging = getPaging(0, 10);
|
||||
PublicApiClient.ListResponse<org.alfresco.rest.api.tests.client.data.Aspect> aspects = null;
|
||||
org.alfresco.rest.api.tests.client.data.Aspect aspect, childAspect = null, smartFilter = null, rescanAspect = null;
|
||||
Map<String, String> otherParams = new HashMap<>();
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception
|
||||
{
|
||||
super.setup();
|
||||
|
||||
childAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
childAspect.setId("mycompany:childAspect");
|
||||
childAspect.setTitle("Child Aspect");
|
||||
childAspect.setDescription("Child Aspect Description");
|
||||
childAspect.setParentId("smf:smartFolder");
|
||||
|
||||
rescanAspect = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
rescanAspect.setId("test:rescan");
|
||||
rescanAspect.setTitle("rescan");
|
||||
rescanAspect.setDescription("Doc that required to scan ");
|
||||
|
||||
smartFilter = new org.alfresco.rest.api.tests.client.data.Aspect();
|
||||
smartFilter.setId("test:smartFilter");
|
||||
smartFilter.setTitle("Smart filter");
|
||||
smartFilter.setDescription("Smart Filter");
|
||||
smartFilter.setParentId("cm:auditable");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAllAspects() throws PublicApiException
|
||||
{
|
||||
@@ -82,32 +113,29 @@ public class TestAspects extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(parentId in ('smf:smartFolder','mycompany:testAspect'))");
|
||||
otherParams.put("where", "(parentIds in ('smf:smartFolder','cm:auditable'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(childAspect);
|
||||
aspects.getList().get(1).expected(testAspect);
|
||||
aspects.getList().get(3).expected(smartFilterAspect);
|
||||
aspects.getList().get(1).expected(childAspect);
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
assertFalse(aspects.getPaging().getHasMoreItems());
|
||||
|
||||
otherParams.put("where", "(parentId in ('smf:smartFolder','mycompany:testAspect') AND namespaceUri matches('http://www.test.*'))");
|
||||
otherParams.put("where", "(parentIds in ('smf:smartFolder','cm:auditable') AND namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(smartFilterAspect);
|
||||
aspects.getList().get(0).expected(smartFilter);
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(1));
|
||||
|
||||
otherParams.put("where", "(parentId in ('smf:smartFolder', 'mycompany:testAspect') AND not namespaceUri matches('http://www.test.*'))");
|
||||
otherParams.put("where", "(parentIds in ('smf:smartFolder','cm:auditable') AND not namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(childAspect);
|
||||
aspects.getList().get(1).expected(testAspect);
|
||||
aspects.getList().get(1).expected(childAspect);
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(3));
|
||||
|
||||
// match everything
|
||||
otherParams.put("where", "(parentId in ('smf:smartFolder','mycompany:testAspect') AND namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(parentIds in ('smf:smartFolder','cm:auditable') AND namespaceUri matches('.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
|
||||
// match nothing
|
||||
otherParams.put("where", "(parentId in ('smf:smartFolder', 'mycompany:testAspect') AND not namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(parentIds in ('smf:smartFolder,cm:auditable') AND not namespaceUri matches('.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(0));
|
||||
}
|
||||
@@ -118,148 +146,31 @@ public class TestAspects extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(6));
|
||||
assertFalse(aspects.getPaging().getHasMoreItems());
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.test.*'))");
|
||||
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(rescanAspect);
|
||||
aspects.getList().get(1).expected(smartFilterAspect);
|
||||
aspects.getList().get(1).expected(smartFilter);
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND not namespaceUri matches('http://www.test.*'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND not namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND namespaceUri matches('.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(6));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND not namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND not namespaceUri matches('.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeProperty() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(rescanAspect);
|
||||
assertNull(aspects.getList().get(0).getProperties());
|
||||
aspects.getList().get(1).expected(smartFilterAspect);
|
||||
assertNull(aspects.getList().get(1).getProperties());
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.test.*'))");
|
||||
otherParams.put("include", "properties");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(rescanAspect);
|
||||
assertNotNull(aspects.getList().get(0).getProperties());
|
||||
aspects.getList().get(1).expected(smartFilterAspect);
|
||||
assertNotNull(aspects.getList().get(0).getProperties());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeAssociation() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "associations");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(6));
|
||||
|
||||
for (Aspect aspect : aspects.getList())
|
||||
{
|
||||
assertNotNull(aspect.getAssociations());
|
||||
assertNull(aspect.getProperties());
|
||||
assertNull(aspect.getMandatoryAspects());
|
||||
}
|
||||
assertTrue(aspects.getList().get(0).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(1).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(2).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(3).getAssociations().isEmpty());
|
||||
assertEquals(aspects.getList().get(4).getAssociations(), testAllAspect.getAssociations());
|
||||
assertTrue(aspects.getList().get(5).getAssociations().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeMandatoryAspect() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "mandatoryAspects");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(6));
|
||||
|
||||
for (Aspect aspect : aspects.getList())
|
||||
{
|
||||
assertNotNull(aspect.getMandatoryAspects());
|
||||
assertNull(aspect.getProperties());
|
||||
assertNull(aspect.getAssociations());
|
||||
}
|
||||
assertTrue(aspects.getList().get(0).getMandatoryAspects().isEmpty());
|
||||
assertTrue(aspects.getList().get(1).getMandatoryAspects().isEmpty());
|
||||
assertTrue(aspects.getList().get(2).getMandatoryAspects().isEmpty());
|
||||
assertTrue(aspects.getList().get(3).getMandatoryAspects().isEmpty());
|
||||
assertEquals(aspects.getList().get(4).getMandatoryAspects(), testAllAspect.getMandatoryAspects());
|
||||
assertTrue(aspects.getList().get(5).getMandatoryAspects().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludes() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "associations,mandatoryAspects");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(6));
|
||||
|
||||
for (Aspect aspect : aspects.getList())
|
||||
{
|
||||
assertNotNull(aspect.getAssociations());
|
||||
assertNotNull(aspect.getMandatoryAspects());
|
||||
assertNull(aspect.getProperties());
|
||||
}
|
||||
assertTrue(aspects.getList().get(0).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(1).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(2).getAssociations().isEmpty());
|
||||
assertTrue(aspects.getList().get(3).getAssociations().isEmpty());
|
||||
assertEquals(aspects.getList().get(4).getAssociations(), testAllAspect.getAssociations());
|
||||
assertEquals(aspects.getList().get(4).getMandatoryAspects(), testAllAspect.getMandatoryAspects());
|
||||
assertTrue(aspects.getList().get(5).getAssociations().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubAspects() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model INCLUDESUBASPECTS'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(5));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model INCLUDESUBASPECTS') AND namespaceUri matches('http://www.test.*'))");
|
||||
aspects = publicApiClient.aspects().getAspects(createParams(paging, otherParams));
|
||||
aspects.getList().get(0).expected(smartFilterAspect);
|
||||
assertEquals(aspects.getPaging().getTotalItems(), Integer.valueOf(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAspectsById() throws PublicApiException
|
||||
{
|
||||
@@ -268,11 +179,6 @@ public class TestAspects extends BaseModelApiTest
|
||||
|
||||
aspect = publicApiClient.aspects().getAspect("mycompany:childAspect");
|
||||
aspect.expected(childAspect);
|
||||
|
||||
aspect = publicApiClient.aspects().getAspect("test2:aspect-all");
|
||||
assertEquals("mandatoryAspects not matched", aspect.getMandatoryAspects(), testAllAspect.getMandatoryAspects());
|
||||
assertEquals("association not matched", aspect.getAssociations(), testAllAspect.getAssociations());
|
||||
aspect.expected(testAllAspect);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -281,12 +187,12 @@ public class TestAspects extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
testListAspectException("(modelId in ('mycompany:model','unknown:model','known:model'))");
|
||||
testListAspectException("(modelId in ('unknown:model','mycompany:model'))");
|
||||
testListAspectException("(modelId in (' ',' ',' ')");
|
||||
testListAspectException("(parentId in ('smf:smartFolder','unknown:aspect'))");
|
||||
testListAspectException("(parentId in ('unknown:aspect','smf:smartFolder'))");
|
||||
testListAspectException("(parentId in (' ',' ',' ')");
|
||||
testListAspectException("(modelIds in ('mycompany:model','unknown:model','known:model'))");
|
||||
testListAspectException("(modelIds in ('unknown:model','mycompany:model'))");
|
||||
testListAspectException("(modelIds in (' ',' ',' ')");
|
||||
testListAspectException("(parentIds in ('smf:smartFolder','unknown:aspect'))");
|
||||
testListAspectException("(parentIds in ('unknown:aspect','smf:smartFolder'))");
|
||||
testListAspectException("(parentIds in (' ',' ',' ')");
|
||||
testListAspectException("(namespaceUri matches('*'))"); // wrong pattern
|
||||
}
|
||||
|
||||
@@ -328,4 +234,11 @@ public class TestAspects extends BaseModelApiTest
|
||||
assertEquals(HttpStatus.SC_BAD_REQUEST, e.getHttpResponse().getStatusCode());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getScope()
|
||||
{
|
||||
return "public";
|
||||
}
|
||||
}
|
||||
|
@@ -27,21 +27,47 @@
|
||||
package org.alfresco.rest.api.tests;
|
||||
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiClient;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiException;
|
||||
import org.alfresco.rest.api.tests.client.RequestContext;
|
||||
import org.alfresco.rest.api.tests.client.data.Type;
|
||||
import org.apache.commons.httpclient.HttpStatus;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
public class TestTypes extends BaseModelApiTest
|
||||
|
||||
public class TestTypes extends AbstractBaseApiTest
|
||||
{
|
||||
|
||||
private PublicApiClient.Paging paging = getPaging(0, 10);
|
||||
PublicApiClient.ListResponse<org.alfresco.rest.api.tests.client.data.Type> types = null;
|
||||
org.alfresco.rest.api.tests.client.data.Type type = null, whitePaperType = null, docType = null;
|
||||
Map<String, String> otherParams = new HashMap<>();
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception
|
||||
{
|
||||
super.setup();
|
||||
whitePaperType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
whitePaperType.setId("mycompany:whitepaper");
|
||||
whitePaperType.setTitle("whitepaper");
|
||||
whitePaperType.setDescription("Whitepaper");
|
||||
whitePaperType.setParentId("mycompany:doc");
|
||||
|
||||
docType = new org.alfresco.rest.api.tests.client.data.Type();
|
||||
docType.setId("mycompany:doc");
|
||||
docType.setTitle("doc");
|
||||
docType.setDescription("Doc");
|
||||
docType.setParentId("cm:content");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAllTypes() throws PublicApiException
|
||||
{
|
||||
@@ -81,27 +107,27 @@ public class TestTypes extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(parentId in ('cm:content'))");
|
||||
otherParams.put("where", "(parentIds in ('cm:content'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
int total = types.getPaging().getTotalItems();
|
||||
|
||||
otherParams.put("where", "(parentId in ('cm:content') AND namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
otherParams.put("where", "(parentIds in ('cm:content') AND namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
|
||||
otherParams.put("where", "(parentId in ('cm:content') AND not namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
otherParams.put("where", "(parentIds in ('cm:content') AND not namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(total - 2));
|
||||
|
||||
// match everything
|
||||
otherParams.put("where", "(parentId in ('cm:content') AND namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(parentIds in ('cm:content') AND namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(total));
|
||||
|
||||
// match nothing
|
||||
otherParams.put("where", "(parentId in ('cm:content') AND not namespaceUri matches('.*'))");
|
||||
otherParams.put("where", "(parentIds in ('cm:content') AND not namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(0));
|
||||
}
|
||||
@@ -112,154 +138,29 @@ public class TestTypes extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND not namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
|
||||
// match everything
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
|
||||
// match nothing
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND not namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeProperty() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(4));
|
||||
assertNull(types.getList().get(0).getProperties());
|
||||
assertNull(types.getList().get(1).getProperties());
|
||||
assertNull(types.getList().get(2).getProperties());
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
otherParams.put("include", "properties");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
assertNotNull(types.getList().get(0).getProperties());
|
||||
assertNotNull(types.getList().get(1).getProperties());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeAssociation() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "associations");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(10));
|
||||
|
||||
|
||||
for (int i = 0; i < types.getList().size(); i++)
|
||||
{
|
||||
Type type = types.getList().get(i);
|
||||
|
||||
assertNotNull(type.getAssociations());
|
||||
assertNull(type.getProperties());
|
||||
assertNull(type.getMandatoryAspects());
|
||||
|
||||
type.expected(allTypes.get(i));
|
||||
assertEquals(type.getAssociations(), allTypes.get(i).getAssociations());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludeMandatoryAspect() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "mandatoryAspects");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
|
||||
for (int i = 0; i < types.getList().size(); i++)
|
||||
{
|
||||
Type type = types.getList().get(i);
|
||||
|
||||
assertNotNull(type.getMandatoryAspects());
|
||||
assertNull(type.getProperties());
|
||||
assertNull(type.getAssociations());
|
||||
|
||||
type.expected(allTypes.get(i));
|
||||
assertEquals(type.getMandatoryAspects(), allTypes.get(i).getMandatoryAspects());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncludes() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('api:apiModel'))");
|
||||
otherParams.put("include", "associations,mandatoryAspects");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(10));
|
||||
|
||||
for (int i = 0; i < types.getList().size(); i++)
|
||||
{
|
||||
Type type = types.getList().get(i);
|
||||
|
||||
assertNotNull(type.getAssociations());
|
||||
assertNull(type.getProperties());
|
||||
assertNotNull(type.getMandatoryAspects());
|
||||
|
||||
type.expected(allTypes.get(i));
|
||||
assertEquals(type.getMandatoryAspects(), allTypes.get(i).getMandatoryAspects());
|
||||
assertEquals(type.getAssociations(), allTypes.get(i).getAssociations());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubTypes() throws PublicApiException
|
||||
{
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model INCLUDESUBTYPES'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(3));
|
||||
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
types.getList().get(2).expected(publishableType);
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(2));
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model INCLUDESUBTYPES') AND namespaceUri matches('http://www.test.*'))");
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND not namespaceUri matches('http://www.mycompany.com/model.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(1));
|
||||
types.getList().get(0).expected(publishableType);
|
||||
|
||||
otherParams.put("where", "(modelId in ('mycompany:model INCLUDESUBTYPES') AND not namespaceUri matches('http://www.test.*'))");
|
||||
// match everything
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
types.getList().get(0).expected(docType);
|
||||
types.getList().get(1).expected(whitePaperType);
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(3));
|
||||
|
||||
// match nothing
|
||||
otherParams.put("where", "(modelIds in ('mycompany:model','test:scan') AND not namespaceUri matches('.*'))");
|
||||
types = publicApiClient.types().getTypes(createParams(paging, otherParams));
|
||||
assertEquals(types.getPaging().getTotalItems(), Integer.valueOf(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -270,12 +171,6 @@ public class TestTypes extends BaseModelApiTest
|
||||
|
||||
type = publicApiClient.types().getType("mycompany:whitepaper");
|
||||
type.expected(whitePaperType);
|
||||
|
||||
type = publicApiClient.types().getType(apiBaseType.getId());
|
||||
type.expected(apiBaseType);
|
||||
assertNotNull(type.getProperties());
|
||||
assertEquals(type.getMandatoryAspects(), apiBaseType.getMandatoryAspects());
|
||||
assertEquals(type.getAssociations(), apiBaseType.getAssociations());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -284,13 +179,13 @@ public class TestTypes extends BaseModelApiTest
|
||||
AuthenticationUtil.setRunAsUser(user1);
|
||||
publicApiClient.setRequestContext(new RequestContext(networkOne.getId(), user1));
|
||||
|
||||
testListTypeException("(modelId in ('mycompany:model','unknown:model'))");
|
||||
testListTypeException("(modelId in ('unknown:model','unknown1:another'))");
|
||||
testListTypeException("(modelId in (' ', '')");
|
||||
testListTypeException("(parentId in ('cm:content','unknown:type')");
|
||||
testListTypeException("(parentId in ('unknown:type','cm:content'))");
|
||||
testListTypeException("(parentId in ('unknown:type','unknown:types'))");
|
||||
testListTypeException("(parentId in (' ',' ',' '))");
|
||||
testListTypeException("(modelIds in ('mycompany:model','unknown:model'))");
|
||||
testListTypeException("(modelIds in ('unknown:model','unknown1:another'))");
|
||||
testListTypeException("(modelIds=' , , ')");
|
||||
testListTypeException("(parentIds in ('cm:content','unknown:type')");
|
||||
testListTypeException("(parentIds in ('unknown:type','cm:content'))");
|
||||
testListTypeException("(parentIds in ('unknown:type','unknown:types'))");
|
||||
testListTypeException("(parentIds in (' ',' ',' '))");
|
||||
testListTypeException("");
|
||||
testListTypeException("(namespaceUri matches('*'))"); // wrong pattern
|
||||
}
|
||||
@@ -333,4 +228,11 @@ public class TestTypes extends BaseModelApiTest
|
||||
assertEquals(HttpStatus.SC_BAD_REQUEST, e.getHttpResponse().getStatusCode());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getScope()
|
||||
{
|
||||
return "public";
|
||||
}
|
||||
}
|
||||
|
@@ -25,9 +25,6 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.tests.client.data;
|
||||
|
||||
import org.alfresco.rest.api.model.Association;
|
||||
import org.alfresco.rest.api.model.AssociationSource;
|
||||
import org.alfresco.rest.api.model.Model;
|
||||
import org.alfresco.rest.api.model.PropertyDefinition;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiClient;
|
||||
import org.json.simple.JSONArray;
|
||||
@@ -54,17 +51,6 @@ public class Aspect extends org.alfresco.rest.api.model.Aspect implements Serial
|
||||
AssertUtil.assertEquals("title", getTitle(), other.getTitle());
|
||||
AssertUtil.assertEquals("description", getDescription(), other.getDescription());
|
||||
AssertUtil.assertEquals("parenId", getParentId(), other.getParentId());
|
||||
AssertUtil.assertEquals("isArchive", getIsArchive(), other.getIsArchive());
|
||||
AssertUtil.assertEquals("isContainer", getIsContainer(), other.getIsContainer());
|
||||
AssertUtil.assertEquals("includedInSupertypeQuery", getIncludedInSupertypeQuery(), other.getIncludedInSupertypeQuery());
|
||||
|
||||
if (getModel() != null && other.getModel() != null)
|
||||
{
|
||||
AssertUtil.assertEquals("modelId", getModel().getId(), other.getModel().getId());
|
||||
AssertUtil.assertEquals("author", getModel().getAuthor(), other.getModel().getAuthor());
|
||||
AssertUtil.assertEquals("namespaceUri", getModel().getNamespaceUri(), other.getModel().getNamespaceUri());
|
||||
AssertUtil.assertEquals("namespacePrefix", getModel().getNamespacePrefix(), other.getModel().getNamespacePrefix());
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -93,36 +79,6 @@ public class Aspect extends org.alfresco.rest.api.model.Aspect implements Serial
|
||||
jsonObject.put("properties", getProperties());
|
||||
}
|
||||
|
||||
if (getModel() != null)
|
||||
{
|
||||
jsonObject.put("model", getModel());
|
||||
}
|
||||
|
||||
if (getMandatoryAspects() != null)
|
||||
{
|
||||
jsonObject.put("mandatoryAspects", getMandatoryAspects());
|
||||
}
|
||||
|
||||
if (getIsContainer() != null)
|
||||
{
|
||||
jsonObject.put("isContainer", getIsContainer());
|
||||
}
|
||||
|
||||
if (getIsArchive() != null)
|
||||
{
|
||||
jsonObject.put("isArchive", getIsArchive());
|
||||
}
|
||||
|
||||
if (getIncludedInSupertypeQuery() != null)
|
||||
{
|
||||
jsonObject.put("includedInSupertypeQuery", getIncludedInSupertypeQuery());
|
||||
}
|
||||
|
||||
if (getAssociations() != null)
|
||||
{
|
||||
jsonObject.put("associations", getAssociations());
|
||||
}
|
||||
|
||||
return jsonObject;
|
||||
}
|
||||
|
||||
@@ -134,75 +90,15 @@ public class Aspect extends org.alfresco.rest.api.model.Aspect implements Serial
|
||||
String description = (String) jsonObject.get("description");
|
||||
String parentId = (String) jsonObject.get("parentId");
|
||||
List<PropertyDefinition> properties = (List<PropertyDefinition>) jsonObject.get("properties");
|
||||
List<String> mandatoryAspects = jsonObject.get("mandatoryAspects") != null ? new ArrayList((List<String>)jsonObject.get("mandatoryAspects")) : null;
|
||||
Boolean isContainer = (Boolean) jsonObject.get("isContainer");
|
||||
Boolean isArchive = (Boolean) jsonObject.get("isArchive");
|
||||
Boolean includedInSupertypeQuery = (Boolean) jsonObject.get("includedInSupertypeQuery");
|
||||
|
||||
List<Association> associations = null;
|
||||
Aspect action = new Aspect();
|
||||
action.setId(id);
|
||||
action.setTitle(title);
|
||||
action.setDescription(description);
|
||||
action.setParentId(parentId);
|
||||
action.setProperties(properties);
|
||||
|
||||
if (jsonObject.get("associations") != null)
|
||||
{
|
||||
associations = new ArrayList<>();
|
||||
JSONArray jsonArray = (JSONArray) jsonObject.get("associations");
|
||||
for(int i = 0; i < jsonArray.size(); i++)
|
||||
{
|
||||
Association association = new Association();
|
||||
JSONObject object = (JSONObject) jsonArray.get(i);
|
||||
association.setId((String) object.get("id"));
|
||||
association.setTitle((String) object.get("title"));
|
||||
association.setDescription((String) object.get("description"));
|
||||
association.setIsChild((Boolean) object.get("child"));
|
||||
association.setIsProtected((Boolean) object.get("isProtected"));
|
||||
|
||||
JSONObject sourceModel = (JSONObject) object.get("source");
|
||||
if (sourceModel != null)
|
||||
{
|
||||
AssociationSource source = new AssociationSource();
|
||||
source.setCls((String) sourceModel.get("cls"));
|
||||
source.setRole((String) sourceModel.get("role"));
|
||||
source.setIsMandatory((Boolean) sourceModel.get("isMandatory"));
|
||||
source.setIsMany((Boolean) sourceModel.get("isMany"));
|
||||
source.setIsMandatoryEnforced((Boolean) sourceModel.get("isMandatoryEnforced"));
|
||||
association.setSource(source);
|
||||
}
|
||||
|
||||
JSONObject targetModel = (JSONObject) object.get("target");
|
||||
{
|
||||
AssociationSource target = new AssociationSource();
|
||||
target.setCls((String) targetModel.get("cls"));
|
||||
target.setRole((String) targetModel.get("role"));
|
||||
target.setIsMandatory((Boolean) targetModel.get("isMandatory"));
|
||||
target.setIsMany((Boolean) targetModel.get("isMany"));
|
||||
target.setIsMandatoryEnforced((Boolean) targetModel.get("isMandatoryEnforced"));
|
||||
association.setTarget(target);
|
||||
}
|
||||
associations.add(association);
|
||||
}
|
||||
}
|
||||
|
||||
JSONObject jsonModel = (JSONObject) jsonObject.get("model");
|
||||
Model model = new Model();
|
||||
model.setId((String) jsonModel.get("id"));
|
||||
model.setDescription((String) jsonModel.get("description"));
|
||||
model.setNamespacePrefix((String) jsonModel.get("namespacePrefix"));
|
||||
model.setNamespaceUri((String) jsonModel.get("namespaceUri"));
|
||||
model.setAuthor((String) jsonModel.get("author"));
|
||||
|
||||
Aspect aspect = new Aspect();
|
||||
aspect.setId(id);
|
||||
aspect.setTitle(title);
|
||||
aspect.setDescription(description);
|
||||
aspect.setParentId(parentId);
|
||||
aspect.setProperties(properties);
|
||||
aspect.setMandatoryAspects(mandatoryAspects);
|
||||
aspect.setIsContainer(isContainer);
|
||||
aspect.setIsArchive(isArchive);
|
||||
aspect.setIncludedInSupertypeQuery(includedInSupertypeQuery);
|
||||
aspect.setAssociations(associations);
|
||||
aspect.setModel(model);
|
||||
|
||||
return aspect;
|
||||
return action;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@@ -25,9 +25,6 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.tests.client.data;
|
||||
|
||||
import org.alfresco.rest.api.model.Association;
|
||||
import org.alfresco.rest.api.model.AssociationSource;
|
||||
import org.alfresco.rest.api.model.Model;
|
||||
import org.alfresco.rest.api.model.PropertyDefinition;
|
||||
import org.alfresco.rest.api.tests.client.PublicApiClient;
|
||||
import org.json.simple.JSONArray;
|
||||
@@ -54,17 +51,6 @@ public class Type extends org.alfresco.rest.api.model.Type implements Serializab
|
||||
AssertUtil.assertEquals("title", getTitle(), other.getTitle());
|
||||
AssertUtil.assertEquals("description", getDescription(), other.getDescription());
|
||||
AssertUtil.assertEquals("parenId", getParentId(), other.getParentId());
|
||||
AssertUtil.assertEquals("isArchive", getIsArchive(), other.getIsArchive());
|
||||
AssertUtil.assertEquals("isContainer", getIsContainer(), other.getIsContainer());
|
||||
AssertUtil.assertEquals("includedInSupertypeQuery", getIncludedInSupertypeQuery(), other.getIncludedInSupertypeQuery());
|
||||
|
||||
if (getModel() != null && other.getModel() != null)
|
||||
{
|
||||
AssertUtil.assertEquals("modelId", getModel().getId(), other.getModel().getId());
|
||||
AssertUtil.assertEquals("author", getModel().getAuthor(), other.getModel().getAuthor());
|
||||
AssertUtil.assertEquals("namespaceUri", getModel().getNamespaceUri(), other.getModel().getNamespaceUri());
|
||||
AssertUtil.assertEquals("namespacePrefix", getModel().getNamespacePrefix(), other.getModel().getNamespacePrefix());
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -93,36 +79,6 @@ public class Type extends org.alfresco.rest.api.model.Type implements Serializab
|
||||
jsonObject.put("properties", getProperties());
|
||||
}
|
||||
|
||||
if (getModel() != null)
|
||||
{
|
||||
jsonObject.put("model", getModel());
|
||||
}
|
||||
|
||||
if (getMandatoryAspects() != null)
|
||||
{
|
||||
jsonObject.put("mandatoryAspects", getMandatoryAspects());
|
||||
}
|
||||
|
||||
if (getIsContainer() != null)
|
||||
{
|
||||
jsonObject.put("isContainer", getIsContainer());
|
||||
}
|
||||
|
||||
if (getIsArchive() != null)
|
||||
{
|
||||
jsonObject.put("isArchive", getIsArchive());
|
||||
}
|
||||
|
||||
if (getIncludedInSupertypeQuery() != null)
|
||||
{
|
||||
jsonObject.put("includedInSupertypeQuery", getIncludedInSupertypeQuery());
|
||||
}
|
||||
|
||||
if (getAssociations() != null)
|
||||
{
|
||||
jsonObject.put("associations", getAssociations());
|
||||
}
|
||||
|
||||
return jsonObject;
|
||||
}
|
||||
|
||||
@@ -134,75 +90,15 @@ public class Type extends org.alfresco.rest.api.model.Type implements Serializab
|
||||
String description = (String) jsonObject.get("description");
|
||||
String parentId = (String) jsonObject.get("parentId");
|
||||
List<PropertyDefinition> properties = (List<PropertyDefinition>) jsonObject.get("properties");
|
||||
List<String> mandatoryAspects = jsonObject.get("mandatoryAspects") != null ? new ArrayList((List<String>)jsonObject.get("mandatoryAspects")) : null;
|
||||
Boolean isContainer = (Boolean) jsonObject.get("isContainer");
|
||||
Boolean isArchive = (Boolean) jsonObject.get("isArchive");
|
||||
Boolean includedInSupertypeQuery = (Boolean) jsonObject.get("includedInSupertypeQuery");
|
||||
|
||||
List<org.alfresco.rest.api.model.Association> associations = null;
|
||||
Type action = new Type();
|
||||
action.setId(id);
|
||||
action.setTitle(title);
|
||||
action.setDescription(description);
|
||||
action.setParentId(parentId);
|
||||
action.setProperties(properties);
|
||||
|
||||
if (jsonObject.get("associations") != null)
|
||||
{
|
||||
associations = new ArrayList<>();
|
||||
JSONArray jsonArray = (JSONArray) jsonObject.get("associations");
|
||||
for(int i = 0; i < jsonArray.size(); i++)
|
||||
{
|
||||
org.alfresco.rest.api.model.Association association = new Association();
|
||||
JSONObject object = (JSONObject) jsonArray.get(i);
|
||||
association.setId((String) object.get("id"));
|
||||
association.setTitle((String) object.get("title"));
|
||||
association.setDescription((String) object.get("description"));
|
||||
association.setIsChild((Boolean) object.get("isChild"));
|
||||
association.setIsProtected((Boolean) object.get("isProtected"));
|
||||
|
||||
JSONObject sourceModel = (JSONObject) object.get("source");
|
||||
if (sourceModel != null)
|
||||
{
|
||||
AssociationSource source = new AssociationSource();
|
||||
source.setCls((String) sourceModel.get("cls"));
|
||||
source.setRole((String) sourceModel.get("role"));
|
||||
source.setIsMandatory((Boolean) sourceModel.get("isMandatory"));
|
||||
source.setIsMany((Boolean) sourceModel.get("isMany"));
|
||||
source.setIsMandatoryEnforced((Boolean) sourceModel.get("isMandatoryEnforced"));
|
||||
association.setSource(source);
|
||||
}
|
||||
|
||||
JSONObject targetModel = (JSONObject) object.get("target");
|
||||
{
|
||||
AssociationSource target = new AssociationSource();
|
||||
target.setCls((String) targetModel.get("cls"));
|
||||
target.setRole((String) targetModel.get("role"));
|
||||
target.setIsMandatory((Boolean) targetModel.get("isMandatory"));
|
||||
target.setIsMany((Boolean) targetModel.get("isMany"));
|
||||
target.setIsMandatoryEnforced((Boolean) targetModel.get("isMandatoryEnforced"));
|
||||
association.setTarget(target);
|
||||
}
|
||||
associations.add(association);
|
||||
}
|
||||
}
|
||||
|
||||
JSONObject jsonModel = (JSONObject) jsonObject.get("model");
|
||||
Model model = new Model();
|
||||
model.setId((String) jsonModel.get("id"));
|
||||
model.setDescription((String) jsonModel.get("description"));
|
||||
model.setNamespacePrefix((String) jsonModel.get("namespacePrefix"));
|
||||
model.setNamespaceUri((String) jsonModel.get("namespaceUri"));
|
||||
model.setAuthor((String) jsonModel.get("author"));
|
||||
|
||||
Type type = new Type();
|
||||
type.setId(id);
|
||||
type.setTitle(title);
|
||||
type.setDescription(description);
|
||||
type.setParentId(parentId);
|
||||
type.setProperties(properties);
|
||||
type.setMandatoryAspects(mandatoryAspects);
|
||||
type.setIsContainer(isContainer);
|
||||
type.setIsArchive(isArchive);
|
||||
type.setIncludedInSupertypeQuery(includedInSupertypeQuery);
|
||||
type.setAssociations(associations);
|
||||
type.setModel(model);
|
||||
|
||||
return type;
|
||||
return action;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@@ -23,46 +23,45 @@
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.rest.framework.tests.core;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
import org.alfresco.repo.forms.FormNotFoundException;
|
||||
package org.alfresco.rest.framework.tests.core;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
import org.alfresco.repo.forms.FormNotFoundException;
|
||||
import org.alfresco.repo.node.integrity.IntegrityException;
|
||||
import org.alfresco.repo.search.QueryParserException;
|
||||
import org.alfresco.rest.framework.core.exceptions.ApiException;
|
||||
import org.alfresco.rest.framework.core.exceptions.ConstraintViolatedException;
|
||||
import org.alfresco.rest.framework.core.exceptions.DeletedResourceException;
|
||||
import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.ErrorResponse;
|
||||
import org.alfresco.rest.framework.core.exceptions.ApiException;
|
||||
import org.alfresco.rest.framework.core.exceptions.ConstraintViolatedException;
|
||||
import org.alfresco.rest.framework.core.exceptions.DeletedResourceException;
|
||||
import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.ErrorResponse;
|
||||
import org.alfresco.rest.framework.core.exceptions.InsufficientStorageException;
|
||||
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
|
||||
import org.alfresco.rest.framework.core.exceptions.NotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
|
||||
import org.alfresco.rest.framework.core.exceptions.RelationshipResourceNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.StaleEntityException;
|
||||
import org.alfresco.rest.framework.core.exceptions.UnsupportedResourceOperationException;
|
||||
import org.alfresco.rest.framework.resource.parameters.where.InvalidQueryException;
|
||||
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
|
||||
import org.alfresco.rest.framework.core.exceptions.NotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
|
||||
import org.alfresco.rest.framework.core.exceptions.RelationshipResourceNotFoundException;
|
||||
import org.alfresco.rest.framework.core.exceptions.StaleEntityException;
|
||||
import org.alfresco.rest.framework.core.exceptions.UnsupportedResourceOperationException;
|
||||
import org.alfresco.rest.framework.resource.parameters.where.InvalidQueryException;
|
||||
import org.alfresco.rest.framework.tools.ApiAssistant;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.extensions.webscripts.WebScriptException;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.extensions.webscripts.WebScriptException;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration(locations = { "classpath:test-rest-context.xml" })
|
||||
public class ExceptionResolverTests
|
||||
{
|
||||
@Autowired
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration(locations = { "classpath:test-rest-context.xml" })
|
||||
public class ExceptionResolverTests
|
||||
{
|
||||
@Autowired
|
||||
ApiAssistant assistant;
|
||||
|
||||
@Test
|
||||
@Test
|
||||
public void testWebscriptException()
|
||||
{
|
||||
ErrorResponse response = assistant.resolveException(new WebScriptException(null));
|
||||
@@ -76,43 +75,43 @@ public class ExceptionResolverTests
|
||||
|
||||
//04180006 Authentication failed for Web Script org/alfresco/api/ResourceWebScript.get
|
||||
@Test
|
||||
public void testMatchException()
|
||||
{
|
||||
public void testMatchException()
|
||||
{
|
||||
ErrorResponse response = assistant.resolveException(new ApiException(null));
|
||||
assertNotNull(response);
|
||||
assertEquals(500, response.getStatusCode()); //default to INTERNAL_SERVER_ERROR
|
||||
|
||||
assertNotNull(response);
|
||||
assertEquals(500, response.getStatusCode()); //default to INTERNAL_SERVER_ERROR
|
||||
|
||||
response = assistant.resolveException(new InvalidArgumentException(null));
|
||||
assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST
|
||||
|
||||
assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST
|
||||
|
||||
response = assistant.resolveException(new InvalidQueryException(null));
|
||||
assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST
|
||||
|
||||
assertEquals(400, response.getStatusCode()); //default to STATUS_BAD_REQUEST
|
||||
|
||||
response = assistant.resolveException(new NotFoundException(null));
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
response = assistant.resolveException(new EntityNotFoundException(null));
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
response = assistant.resolveException(new RelationshipResourceNotFoundException(null, null));
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
assertEquals(404, response.getStatusCode()); //default to STATUS_NOT_FOUND
|
||||
|
||||
response = assistant.resolveException(new PermissionDeniedException(null));
|
||||
assertEquals(403, response.getStatusCode()); //default to STATUS_FORBIDDEN
|
||||
|
||||
assertEquals(403, response.getStatusCode()); //default to STATUS_FORBIDDEN
|
||||
|
||||
response = assistant.resolveException(new UnsupportedResourceOperationException(null));
|
||||
assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED
|
||||
|
||||
assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED
|
||||
|
||||
response = assistant.resolveException(new DeletedResourceException(null));
|
||||
assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED
|
||||
|
||||
assertEquals(405, response.getStatusCode()); //default to STATUS_METHOD_NOT_ALLOWED
|
||||
|
||||
response = assistant.resolveException(new ConstraintViolatedException(null));
|
||||
assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT
|
||||
|
||||
assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT
|
||||
|
||||
response = assistant.resolveException(new StaleEntityException(null));
|
||||
assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT
|
||||
assertEquals(409, response.getStatusCode()); //default to STATUS_CONFLICT
|
||||
|
||||
//Try a random exception
|
||||
//Try a random exception
|
||||
response = assistant.resolveException(new FormNotFoundException(null));
|
||||
assertEquals(500, response.getStatusCode()); //default to INTERNAL_SERVER_ERROR
|
||||
|
||||
@@ -121,15 +120,6 @@ public class ExceptionResolverTests
|
||||
|
||||
response = assistant.resolveException(new IntegrityException(null));
|
||||
assertEquals(422, response.getStatusCode());
|
||||
|
||||
}
|
||||
|
||||
/** Check that the status code from SS is passed back to the caller. */
|
||||
@Test
|
||||
public void testQueryParserException()
|
||||
{
|
||||
ErrorResponse response = assistant.resolveException(new QueryParserException("Endpoint not found", 404));
|
||||
assertNotNull(response);
|
||||
assertEquals("Expected status code to be passed through from query parser.", 404, response.getStatusCode());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -97,8 +97,6 @@
|
||||
<mandatory-aspects/>
|
||||
</aspect>
|
||||
<aspect name="mycompany:testAspect">
|
||||
<title>Test Aspect</title>
|
||||
<archive>true</archive>
|
||||
<properties>
|
||||
<property name="mycompany:testProperty">
|
||||
<title>Test Property</title>
|
||||
|
@@ -1,358 +0,0 @@
|
||||
<model name="api:apiModel" xmlns="http://www.alfresco.org/model/dictionary/1.0">
|
||||
<author>Administrator</author>
|
||||
|
||||
<imports>
|
||||
<import uri="http://www.alfresco.org/model/dictionary/1.0" prefix="d"/>
|
||||
</imports>
|
||||
|
||||
<namespaces>
|
||||
<namespace uri="http://www.api.t1/model/1.0" prefix="api"/>
|
||||
<namespace uri="http://www.api.t2/model/1.0" prefix="test2"/>
|
||||
</namespaces>
|
||||
|
||||
<constraints>
|
||||
<constraint name="api:regex1" type="REGEX">
|
||||
<title>Regex1 title</title>
|
||||
<description>Regex1 description</description>
|
||||
<parameter name="expression"><value>[A-Z]*</value></parameter>
|
||||
<parameter name="requiresMatch"><value>false</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:regex2" type="REGEX">
|
||||
<parameter name="expression"><value>[a-z]*</value></parameter>
|
||||
<parameter name="requiresMatch"><value>false</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:stringLength1" type="LENGTH">
|
||||
<parameter name="minLength"><value>0</value></parameter>
|
||||
<parameter name="maxLength"><value>256</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:stringLength2" type="LENGTH">
|
||||
<parameter name="minLength"><value>0</value></parameter>
|
||||
<parameter name="maxLength"><value>128</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:minMax1" type="MINMAX">
|
||||
<parameter name="minValue"><value>0</value></parameter>
|
||||
<parameter name="maxValue"><value>256</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:list1" type="LIST">
|
||||
<title>List1 title</title>
|
||||
<description>List1 description</description>
|
||||
<parameter name="allowedValues">
|
||||
<list>
|
||||
<value>ABC</value>
|
||||
<value>DEF</value>
|
||||
<value>VALUE WITH SPACES</value>
|
||||
<value>VALUE WITH TRAILING SPACE </value>
|
||||
</list>
|
||||
</parameter>
|
||||
<parameter name="caseSensitive"><value>true</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="api:list2" type="LIST">
|
||||
<parameter name="allowedValues">
|
||||
<list>
|
||||
<value>HIJ</value>
|
||||
</list>
|
||||
</parameter>
|
||||
<parameter name="caseSensitive"><value>true</value></parameter>
|
||||
</constraint>
|
||||
<constraint name="test2:list3" type="LIST">
|
||||
<parameter name="allowedValues">
|
||||
<list>
|
||||
<value>XYZ</value>
|
||||
</list>
|
||||
</parameter>
|
||||
<parameter name="caseSensitive"><value>true</value></parameter>
|
||||
</constraint>
|
||||
</constraints>
|
||||
|
||||
<types>
|
||||
<type name="api:base">
|
||||
<title>Base</title>
|
||||
<description>The Base Type</description>
|
||||
<properties>
|
||||
<property name="api:prop1">
|
||||
<type>d:text</type>
|
||||
<protected>true</protected>
|
||||
<default/>
|
||||
<constraints>
|
||||
<constraint ref="api:regex1"/>
|
||||
<constraint ref="api:stringLength1">
|
||||
<title>Prop1 Strlen1 title</title>
|
||||
<description>Prop1 Strlen1 description</description>
|
||||
</constraint>
|
||||
</constraints>
|
||||
</property>
|
||||
</properties>
|
||||
<associations>
|
||||
<association name="api:assoc1">
|
||||
<source>
|
||||
<mandatory>true</mandatory>
|
||||
<many>false</many>
|
||||
</source>
|
||||
<target>
|
||||
<class>api:base</class>
|
||||
<mandatory>false</mandatory>
|
||||
<many>true</many>
|
||||
</target>
|
||||
</association>
|
||||
<association name="api:assoc2">
|
||||
<source>
|
||||
<mandatory>true</mandatory>
|
||||
<many>true</many>
|
||||
</source>
|
||||
<target>
|
||||
<class>api:referenceable</class>
|
||||
<mandatory>false</mandatory>
|
||||
<many>false</many>
|
||||
</target>
|
||||
</association>
|
||||
<child-association name="api:childassoc1">
|
||||
<source>
|
||||
<mandatory>true</mandatory>
|
||||
<many>true</many>
|
||||
</source>
|
||||
<target>
|
||||
<class>api:referenceable</class>
|
||||
<mandatory>false</mandatory>
|
||||
<many>false</many>
|
||||
</target>
|
||||
<child-name>fred</child-name>
|
||||
<duplicate>true</duplicate>
|
||||
</child-association>
|
||||
<child-association name="api:childassocPropagate">
|
||||
<source>
|
||||
<mandatory>true</mandatory>
|
||||
<many>true</many>
|
||||
</source>
|
||||
<target>
|
||||
<class>api:referenceable</class>
|
||||
<mandatory>false</mandatory>
|
||||
<many>false</many>
|
||||
</target>
|
||||
<child-name>fred</child-name>
|
||||
<duplicate>true</duplicate>
|
||||
<propagateTimestamps>true</propagateTimestamps>
|
||||
</child-association>
|
||||
</associations>
|
||||
|
||||
<mandatory-aspects>
|
||||
<aspect>api:referenceable</aspect>
|
||||
</mandatory-aspects>
|
||||
</type>
|
||||
|
||||
<type name="api:file">
|
||||
<parent>api:base</parent>
|
||||
<archive>true</archive>
|
||||
|
||||
<properties>
|
||||
<property name="api:fileprop">
|
||||
<type>d:text</type>
|
||||
<protected>true</protected>
|
||||
<default></default>
|
||||
</property>
|
||||
|
||||
</properties>
|
||||
|
||||
<associations>
|
||||
<child-association name="api:childassoc2">
|
||||
<target>
|
||||
<class>api:referenceable</class>
|
||||
</target>
|
||||
<child-name>fred</child-name>
|
||||
<duplicate>true</duplicate>
|
||||
</child-association>
|
||||
</associations>
|
||||
|
||||
<overrides>
|
||||
<property name="api:prop1">
|
||||
<default>an overriden default value</default>
|
||||
<constraints>
|
||||
<constraint ref="api:stringLength2"/>
|
||||
<constraint ref="api:regex2"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</overrides>
|
||||
</type>
|
||||
|
||||
<type name="api:file-derived">
|
||||
<parent>api:file</parent>
|
||||
</type>
|
||||
|
||||
<type name="api:file-derived-no-archive">
|
||||
<parent>api:file</parent>
|
||||
<archive>false</archive>
|
||||
</type>
|
||||
|
||||
<type name="api:folder">
|
||||
<parent>api:base</parent>
|
||||
<properties>
|
||||
<property name="api:folderprop">
|
||||
<type>d:text</type>
|
||||
<protected>true</protected>
|
||||
<default></default>
|
||||
</property>
|
||||
</properties>
|
||||
</type>
|
||||
|
||||
<type name="api:enforced">
|
||||
<parent>api:base</parent>
|
||||
<properties>
|
||||
<property name="api:mandatory-enforced">
|
||||
<type>d:text</type>
|
||||
<mandatory enforced="true">true</mandatory>
|
||||
</property>
|
||||
<property name="api:mandatory-not-enforced">
|
||||
<type>d:text</type>
|
||||
<mandatory enforced="false">true</mandatory>
|
||||
</property>
|
||||
<property name="api:mandatory-default-enforced">
|
||||
<type>d:text</type>
|
||||
<mandatory>true</mandatory>
|
||||
</property>
|
||||
</properties>
|
||||
</type>
|
||||
|
||||
<type name="api:overridetype1">
|
||||
<properties>
|
||||
<property name="api:propoverride">
|
||||
<type>d:text</type>
|
||||
<default>one</default>
|
||||
</property>
|
||||
</properties>
|
||||
</type>
|
||||
|
||||
<type name="api:overridetype2">
|
||||
<parent>api:overridetype1</parent>
|
||||
<overrides>
|
||||
<property name="api:propoverride">
|
||||
<default>two</default>
|
||||
</property>
|
||||
</overrides>
|
||||
</type>
|
||||
|
||||
<type name="api:overridetype3">
|
||||
<parent>api:overridetype2</parent>
|
||||
<overrides>
|
||||
<property name="api:propoverride">
|
||||
<default>three</default>
|
||||
</property>
|
||||
</overrides>
|
||||
</type>
|
||||
|
||||
<type name="api:typeWithNamedPropConstraint">
|
||||
<title>Type with named property-defined constraint.</title>
|
||||
<description>A type with a named constraint defined within one of its properties.</description>
|
||||
<parent></parent>
|
||||
|
||||
<properties>
|
||||
<property name="api:constrainedProp">
|
||||
<type>d:text</type>
|
||||
<protected>true</protected>
|
||||
<default></default>
|
||||
<constraints>
|
||||
<constraint name="api:inlineConstraint" type="LIST">
|
||||
<title>Inline constraint</title>
|
||||
<description>An inline constraint</description>
|
||||
<parameter name="allowedValues">
|
||||
<list>
|
||||
<value>ALPHA</value>
|
||||
<value>BETA</value>
|
||||
<value>GAMMA, DELTA</value>
|
||||
<value>OMEGA</value>
|
||||
</list>
|
||||
</parameter>
|
||||
<parameter name="caseSensitive"><value>true</value></parameter>
|
||||
</constraint>
|
||||
</constraints>
|
||||
</property>
|
||||
</properties>
|
||||
</type>
|
||||
</types>
|
||||
|
||||
<aspects>
|
||||
<aspect name="api:referenceable">
|
||||
<title>Referenceable</title>
|
||||
<description>The referenceable aspect</description>
|
||||
<parent></parent>
|
||||
|
||||
<properties>
|
||||
<property name="api:id">
|
||||
<type>d:int</type>
|
||||
<protected>true</protected>
|
||||
<mandatory>true</mandatory>
|
||||
<constraints>
|
||||
<constraint ref="api:minMax1"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</properties>
|
||||
</aspect>
|
||||
<aspect name="api:aspect-base">
|
||||
<title>Aspect Base</title>
|
||||
<parent></parent>
|
||||
<properties>
|
||||
<property name="api:aspect-base-p1">
|
||||
<type>d:text</type>
|
||||
<constraints>
|
||||
<constraint ref="api:list1"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</properties>
|
||||
</aspect>
|
||||
<aspect name="api:aspect-one">
|
||||
<title>Aspect One</title>
|
||||
<parent>api:aspect-base</parent>
|
||||
<overrides>
|
||||
<property name="api:aspect-base-p1">
|
||||
<constraints>
|
||||
<constraint ref="api:list2"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</overrides>
|
||||
</aspect>
|
||||
<aspect name="api:aspect-two">
|
||||
<title>Aspect Two</title>
|
||||
<parent>api:aspect-base</parent>
|
||||
<overrides>
|
||||
<property name="api:aspect-base-p1">
|
||||
<constraints>
|
||||
<constraint ref="api:list1"/>
|
||||
<constraint ref="api:list2"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</overrides>
|
||||
</aspect>
|
||||
<aspect name="test2:aspect-three">
|
||||
<title>Aspect derived from other namespace</title>
|
||||
<parent>api:aspect-base</parent>
|
||||
<overrides>
|
||||
<property name="api:aspect-base-p1">
|
||||
<constraints>
|
||||
<constraint ref="test2:list3"/>
|
||||
</constraints>
|
||||
</property>
|
||||
</overrides>
|
||||
</aspect>
|
||||
<aspect name="test2:aspect-all">
|
||||
<title>Aspect derived from other namespace</title>
|
||||
<archive>false</archive>
|
||||
<includedInSuperTypeQuery>false</includedInSuperTypeQuery>
|
||||
<associations>
|
||||
<association name="api:assoc-all">
|
||||
<source>
|
||||
<mandatory>true</mandatory>
|
||||
<many>true</many>
|
||||
</source>
|
||||
<target>
|
||||
<class>api:referenceable</class>
|
||||
<mandatory>false</mandatory>
|
||||
<many>false</many>
|
||||
</target>
|
||||
</association>
|
||||
</associations>
|
||||
<mandatory-aspects>
|
||||
<aspect>test2:aspect-three</aspect>
|
||||
<aspect>api:aspect-two</aspect>
|
||||
<aspect>api:aspect-one</aspect>
|
||||
</mandatory-aspects>
|
||||
</aspect>
|
||||
</aspects>
|
||||
</model>
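The model above exercises several constraint types (REGEX, LENGTH, MINMAX and LIST). As a rough illustration of what a case-sensitive LIST constraint such as api:list1 enforces, the following standalone sketch checks a value against the allowed set; the class and method names are illustrative only and are not part of the Alfresco dictionary API.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Minimal sketch of a case-sensitive LIST constraint check, mirroring api:list1 above.
public class ListConstraintSketch
{
    private final Set<String> allowedValues;
    private final boolean caseSensitive;

    public ListConstraintSketch(List<String> allowedValues, boolean caseSensitive)
    {
        this.caseSensitive = caseSensitive;
        this.allowedValues = new HashSet<>();
        for (String value : allowedValues)
        {
            this.allowedValues.add(caseSensitive ? value : value.toLowerCase());
        }
    }

    public boolean isValid(String value)
    {
        if (value == null)
        {
            return false;
        }
        return allowedValues.contains(caseSensitive ? value : value.toLowerCase());
    }

    public static void main(String[] args)
    {
        ListConstraintSketch list1 = new ListConstraintSketch(
                List.of("ABC", "DEF", "VALUE WITH SPACES", "VALUE WITH TRAILING SPACE "), true);
        System.out.println(list1.isValid("ABC"));  // true
        System.out.println(list1.isValid("abc"));  // false - the constraint is case sensitive
        System.out.println(list1.isValid("XYZ"));  // false - only allowed by test2:list3
    }
}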
|
@@ -5,7 +5,6 @@
<import uri="http://www.alfresco.org/model/content/1.0" prefix="cm"/>
<import uri="http://www.alfresco.org/model/dictionary/1.0" prefix="d"/>
<import uri="http://www.alfresco.org/model/content/smartfolder/1.0" prefix="smf"/>
<import uri="http://www.mycompany.com/model/finance/1.0" prefix="mycompany"/>
</imports>
<namespaces>
<namespace uri="http://www.test.com/model/account/1.0" prefix="test"/>
@@ -50,9 +49,6 @@
<overrides/>
<mandatory-aspects/>
</type>
<type name="test:publishable">
<parent>mycompany:doc</parent>
</type>
</types>
<aspects>
<aspect name="test:rescan">
@@ -78,7 +74,7 @@
<aspect name="test:smartFilter">
<title>Smart filter</title>
<description>Smart Filter</description>
<parent>mycompany:testAspect</parent>
<parent>cm:auditable</parent>
<properties/>
<associations/>
<overrides/>

@@ -18,7 +18,6 @@
<value>models/people-api.xml</value>
<value>models/mycompany-model.xml</value>
<value>models/test-scan.xml</value>
<value>models/test-api-model.xml</value>
</list>
</property>
</bean>

@@ -33,12 +33,10 @@
</bean>
<bean id="webScriptExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.WebScriptExceptionResolver">
</bean>
<bean id="queryParserExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.QueryParserExceptionResolver" />
<bean id="apiAssistant" class="org.alfresco.rest.framework.tools.ApiAssistant">
<property name="jsonHelper" ref="jsonHelper" />
<property name="resolver" ref="simpleMappingExceptionResolver" />
<property name="webScriptExceptionResolver" ref="webScriptExceptionResolver" />
<property name="queryParserExceptionResolver" ref="queryParserExceptionResolver" />
</bean>
<bean id="simpleMappingExceptionResolver" class="org.alfresco.rest.framework.core.exceptions.SimpleMappingExceptionResolver">
<property name="exceptionMappings">

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>10.13</version>
<version>repo-5439v2-c2</version>
</parent>

<dependencies>
@@ -245,7 +245,7 @@
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>2.3.20-alfresco-patched-20220413</version>
<version>2.3.20-alfresco-patched-20200421</version>
</dependency>
<dependency>
<groupId>org.apache.xmlbeans</groupId>
@@ -383,7 +383,7 @@
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
<version>6.2.6</version>
<version>6.2.4</version>
</dependency>

<!-- GData -->
@@ -687,10 +687,6 @@
<groupId>org.apache.camel</groupId>
<artifactId>camel-direct</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-management</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-mock</artifactId>

@@ -40,7 +40,6 @@ public class IdsEntity
|
||||
private Long idThree;
|
||||
private Long idFour;
|
||||
private List<Long> ids;
|
||||
private boolean ordered;
|
||||
public Long getIdOne()
|
||||
{
|
||||
return idOne;
|
||||
@@ -81,12 +80,4 @@ public class IdsEntity
|
||||
{
|
||||
this.ids = ids;
|
||||
}
|
||||
public boolean isOrdered()
|
||||
{
|
||||
return ordered;
|
||||
}
|
||||
public void setOrdered(boolean ordered)
|
||||
{
|
||||
this.ordered = ordered;
|
||||
}
|
||||
}
|
||||
|
@@ -32,8 +32,6 @@ import org.alfresco.sync.repo.Client;
|
||||
import org.alfresco.sync.repo.Client.ClientType;
|
||||
import org.alfresco.repo.activities.ActivityType;
|
||||
import org.alfresco.repo.model.filefolder.HiddenAspect;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.tenant.TenantService;
|
||||
import org.alfresco.service.cmr.activities.ActivityInfo;
|
||||
import org.alfresco.service.cmr.activities.ActivityPoster;
|
||||
@@ -230,7 +228,7 @@ public class ActivityPosterImpl implements CmisActivityPoster, InitializingBean
|
||||
{
|
||||
if(activitiesEnabled && !hiddenAspect.hasHiddenAspect(nodeRef))
|
||||
{
|
||||
SiteInfo siteInfo = getSiteAsSystem(nodeRef);
|
||||
SiteInfo siteInfo = siteService.getSite(nodeRef);
|
||||
String siteId = (siteInfo != null ? siteInfo.getShortName() : null);
|
||||
if(siteId != null && !siteId.equals(""))
|
||||
{
|
||||
@@ -292,16 +290,5 @@ public class ActivityPosterImpl implements CmisActivityPoster, InitializingBean
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private SiteInfo getSiteAsSystem(NodeRef nodeRef)
|
||||
{
|
||||
return AuthenticationUtil.runAsSystem(new RunAsWork<SiteInfo>()
|
||||
{
|
||||
@Override
|
||||
public SiteInfo doWork() throws Exception
|
||||
{
|
||||
return siteService.getSite(nodeRef);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
@@ -211,16 +211,6 @@ public class AlfrescoCmisServiceFactory extends AbstractServiceFactory
|
||||
}
|
||||
|
||||
AlfrescoCmisService service = getCmisServiceTarget(connector);
|
||||
if (service instanceof AlfrescoCmisServiceImpl)
|
||||
{
|
||||
Set<String> stringSet = parseCommaSeparatedSet(getCmisCreateDocRequestRenditionsSet());
|
||||
((AlfrescoCmisServiceImpl)service).setCmisRequestRenditionsOnCreateDoc(stringSet);
|
||||
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("getService: cmis.create.doc.request.renditions.set=" + stringSet);
|
||||
}
|
||||
}
|
||||
|
||||
// Wrap it
|
||||
ProxyFactory proxyFactory = new ProxyFactory(service);
|
||||
@@ -245,7 +235,13 @@ public class AlfrescoCmisServiceFactory extends AbstractServiceFactory
|
||||
|
||||
protected AlfrescoCmisService getCmisServiceTarget(CMISConnector connector)
|
||||
{
|
||||
return new AlfrescoCmisServiceImpl(connector);
|
||||
AlfrescoCmisServiceImpl cmisService = new AlfrescoCmisServiceImpl(connector);
|
||||
|
||||
Set<String> stringSet = parseCommaSeparatedSet(getCmisCreateDocRequestRenditionsSet());
|
||||
logger.trace("getCmisServiceTarget: cmis.create.doc.request.renditions.set=" + stringSet);
|
||||
cmisService.setCmisRequestRenditionsOnCreateDoc(stringSet);
|
||||
|
||||
return cmisService;
|
||||
}
|
||||
|
||||
private Set<String> parseCommaSeparatedSet(String str)
|
||||
|
@@ -34,10 +34,7 @@ import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.admin.SysAdminParams;
|
||||
import org.alfresco.repo.jscript.ScriptAction;
|
||||
import org.alfresco.service.ServiceRegistry;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ActionDefinition;
|
||||
import org.alfresco.service.cmr.action.ActionService;
|
||||
import org.alfresco.service.cmr.action.ParameterConstraint;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -129,10 +126,6 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
|
||||
if (nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
NodeRef scriptRef = (NodeRef)action.getParameterValue(PARAM_SCRIPTREF);
|
||||
if(!isValidScriptRef(action))
|
||||
{
|
||||
throw new IllegalStateException("Invalid script ref path: " + scriptRef);
|
||||
}
|
||||
NodeRef spaceRef = this.serviceRegistry.getRuleService().getOwningNodeRef(action);
|
||||
if (spaceRef == null)
|
||||
{
|
||||
@@ -229,19 +222,4 @@ public class ScriptActionExecuter extends ActionExecuterAbstractBase
|
||||
|
||||
return companyHomeRef;
|
||||
}
|
||||
|
||||
private boolean isValidScriptRef(Action action)
|
||||
{
|
||||
NodeRef scriptRef = (NodeRef) action.getParameterValue(PARAM_SCRIPTREF);
|
||||
ActionService actionService = this.serviceRegistry.getActionService();
|
||||
ActionDefinition actDef = actionService.getActionDefinition(action.getActionDefinitionName());
|
||||
ParameterDefinition parameterDef = actDef.getParameterDefintion(PARAM_SCRIPTREF);
|
||||
String paramConstraintName = parameterDef.getParameterConstraintName();
|
||||
if (paramConstraintName != null)
|
||||
{
|
||||
ParameterConstraint paramConstraint = actionService.getParameterConstraint(paramConstraintName);
|
||||
return paramConstraint.isValidValue(scriptRef.toString());
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
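The isValidScriptRef method shown in the hunk above validates the script NodeRef against the parameter constraint declared on the action definition. As a hedged sketch of that pattern, the helper below is hypothetical, but the ActionService, ActionDefinition and ParameterConstraint calls mirror exactly what the diff uses.

import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionDefinition;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.action.ParameterConstraint;
import org.alfresco.service.cmr.action.ParameterDefinition;

// Sketch: look up the constraint declared on an action parameter and ask it whether a
// candidate value is allowed. Class and method names are illustrative.
public class ParameterConstraintCheckSketch
{
    public static boolean isAllowed(ActionService actionService, Action action,
            String parameterName, String candidateValue)
    {
        ActionDefinition actionDef = actionService.getActionDefinition(action.getActionDefinitionName());
        ParameterDefinition parameterDef = actionDef.getParameterDefintion(parameterName);
        String constraintName = parameterDef.getParameterConstraintName();
        if (constraintName == null)
        {
            // No constraint declared on the parameter, so any value is accepted.
            return true;
        }
        ParameterConstraint constraint = actionService.getParameterConstraint(constraintName);
        return constraint.isValidValue(candidateValue);
    }
}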
|
||||
|
@@ -1483,17 +1483,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
|
||||
|
||||
// Update ACLs for moved tree
|
||||
Long newParentAclId = newParentNode.getAclId();
|
||||
|
||||
// Verify if parent has aspect applied and ACL's are pending
|
||||
if (hasNodeAspect(oldParentNodeId, ContentModel.ASPECT_PENDING_FIX_ACL))
|
||||
{
|
||||
Long oldParentSharedAclId = (Long) this.getNodeProperty(oldParentNodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);
|
||||
accessControlListDAO.updateInheritance(newChildNodeId, oldParentSharedAclId, newParentAclId);
|
||||
}
|
||||
else
|
||||
{
|
||||
accessControlListDAO.updateInheritance(newChildNodeId, oldParentAclId, newParentAclId);
|
||||
}
|
||||
accessControlListDAO.updateInheritance(newChildNodeId, oldParentAclId, newParentAclId);
|
||||
}
|
||||
|
||||
// Done
|
||||
@@ -2756,22 +2746,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
|
||||
selectNodesWithAspects(qnameIds, minNodeId, maxNodeId, resultsCallback);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void getNodesWithAspects(
|
||||
Set<QName> aspectQNames,
|
||||
Long minNodeId, Long maxNodeId, boolean ordered,
|
||||
NodeRefQueryCallback resultsCallback)
|
||||
{
|
||||
Set<Long> qnameIdsSet = qnameDAO.convertQNamesToIds(aspectQNames, false);
|
||||
if (qnameIdsSet.size() == 0)
|
||||
{
|
||||
// No point running a query
|
||||
return;
|
||||
}
|
||||
List<Long> qnameIds = new ArrayList<Long>(qnameIdsSet);
|
||||
selectNodesWithAspects(qnameIds, minNodeId, maxNodeId, ordered, resultsCallback);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Returns a writable copy of the cached aspects set
|
||||
*/
|
||||
@@ -4943,10 +4917,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
|
||||
List<Long> qnameIds,
|
||||
Long minNodeId, Long maxNodeId,
|
||||
NodeRefQueryCallback resultsCallback);
|
||||
protected abstract void selectNodesWithAspects(
|
||||
List<Long> qnameIds,
|
||||
Long minNodeId, Long maxNodeId, boolean ordered,
|
||||
NodeRefQueryCallback resultsCallback);
|
||||
protected abstract Long insertNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId, int assocIndex);
|
||||
protected abstract int updateNodeAssoc(Long id, int assocIndex);
|
||||
protected abstract int deleteNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId);
|
||||
|
@@ -405,20 +405,6 @@ public interface NodeDAO extends NodeBulkLoader
|
||||
Long minNodeId, Long maxNodeId,
|
||||
NodeRefQueryCallback resultsCallback);
|
||||
|
||||
/**
|
||||
* Get nodes with aspects between the given ranges, ordering the results optionally
|
||||
*
|
||||
* @param aspectQNames the aspects that must be on the nodes
|
||||
* @param minNodeId the minimum node ID (inclusive)
|
||||
* @param maxNodeId the maximum node ID (exclusive)
|
||||
* @param ordered if the results are to be ordered by nodeID
|
||||
* @param resultsCallback callback to process results
|
||||
*/
|
||||
public void getNodesWithAspects(
|
||||
Set<QName> aspectQNames,
|
||||
Long minNodeId, Long maxNodeId, boolean ordered,
|
||||
NodeRefQueryCallback resultsCallback);
|
||||
|
||||
/*
|
||||
* Node Assocs
|
||||
*/
|
||||
|
@@ -427,6 +427,10 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
|
||||
NodeEntity node = new NodeEntity();
|
||||
node.setId(id);
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("+ Read node with id: "+id);
|
||||
}
|
||||
return template.selectOne(SELECT_NODE_BY_ID, node);
|
||||
}
|
||||
|
||||
@@ -450,6 +454,10 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
|
||||
}
|
||||
node.setUuid(uuid);
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("+ Read node with uuid: "+uuid);
|
||||
}
|
||||
return template.selectOne(SELECT_NODE_BY_NODEREF, node);
|
||||
}
|
||||
|
||||
@@ -764,31 +772,6 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
|
||||
template.select(SELECT_NODES_WITH_ASPECT_IDS, parameters, resultHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void selectNodesWithAspects(
|
||||
List<Long> qnameIds,
|
||||
Long minNodeId, Long maxNodeId, boolean ordered,
|
||||
final NodeRefQueryCallback resultsCallback)
|
||||
{
|
||||
@SuppressWarnings("rawtypes")
|
||||
ResultHandler resultHandler = new ResultHandler()
|
||||
{
|
||||
public void handleResult(ResultContext context)
|
||||
{
|
||||
NodeEntity entity = (NodeEntity) context.getResultObject();
|
||||
Pair<Long, NodeRef> nodePair = new Pair<Long, NodeRef>(entity.getId(), entity.getNodeRef());
|
||||
resultsCallback.handle(nodePair);
|
||||
}
|
||||
};
|
||||
|
||||
IdsEntity parameters = new IdsEntity();
|
||||
parameters.setIdOne(minNodeId);
|
||||
parameters.setIdTwo(maxNodeId);
|
||||
parameters.setIds(qnameIds);
|
||||
parameters.setOrdered(ordered);
|
||||
template.select(SELECT_NODES_WITH_ASPECT_IDS, parameters, resultHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Long insertNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId, int assocIndex)
|
||||
{
|
||||
|
@@ -337,13 +337,6 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
setFixedAcls(getNodeIdNotNull(parent), inheritFrom, null, sharedAclToReplace, changes, false, asyncCall, true);
|
||||
return changes;
|
||||
}
|
||||
|
||||
public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall, boolean forceSharedACL)
|
||||
{
|
||||
List<AclChange> changes = new ArrayList<AclChange>();
|
||||
setFixedAcls(getNodeIdNotNull(parent), inheritFrom, null, sharedAclToReplace, changes, false, asyncCall, true, forceSharedACL);
|
||||
return changes;
|
||||
}
|
||||
|
||||
public void updateChangedAcls(NodeRef startingPoint, List<AclChange> changes)
|
||||
{
|
||||
@@ -369,29 +362,6 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, false, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Support to set a shared ACL on a node and all of its children
|
||||
*
|
||||
* @param nodeId
|
||||
* the parent node
|
||||
* @param inheritFrom
|
||||
* the parent node's ACL
|
||||
* @param mergeFrom
|
||||
* the shared ACL, if already known. If <code>null</code>, will be retrieved / created lazily
|
||||
* @param changes
|
||||
* the list in which to record changes
|
||||
* @param set
|
||||
* set the shared ACL on the parent ?
|
||||
* @param asyncCall
|
||||
* function may require asynchronous call depending the execution time; if time exceeds configured <code>fixedAclMaxTransactionTime</code> value,
|
||||
* recursion is stopped using propagateOnChildren parameter(set on false) and those nodes for which the method execution was not finished
|
||||
* in the classical way, will have ASPECT_PENDING_FIX_ACL, which will be used in {@link FixedAclUpdater} for later processing
|
||||
*/
|
||||
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
|
||||
{
|
||||
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, false, true, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Support to set a shared ACL on a node and all of its children
|
||||
*
|
||||
@@ -409,10 +379,8 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
* function may require asynchronous call depending the execution time; if time exceeds configured <code>fixedAclMaxTransactionTime</code> value,
|
||||
* recursion is stopped using propagateOnChildren parameter(set on false) and those nodes for which the method execution was not finished
|
||||
* in the classical way, will have ASPECT_PENDING_FIX_ACL, which will be used in {@link FixedAclUpdater} for later processing
|
||||
* @param forceSharedACL
|
||||
* When a child node has an unexpected ACL, force it to assume the new shared ACL instead of throwing a concurrency exception.
|
||||
*/
|
||||
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren, boolean forceSharedACL)
|
||||
public void setFixedAcls(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace, List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
|
||||
{
|
||||
if (log.isDebugEnabled())
|
||||
{
|
||||
@@ -463,14 +431,14 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
|
||||
if (acl == null)
|
||||
{
|
||||
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren, forceSharedACL);
|
||||
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Still has old shared ACL or already replaced
|
||||
if(acl.equals(sharedAclToReplace) || acl.equals(mergeFrom) || acl.equals(currentAcl))
|
||||
{
|
||||
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren, forceSharedACL);
|
||||
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace, changes, false, asyncCall, propagateOnChildren);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -489,20 +457,7 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
}
|
||||
else if (dbAcl.getAclType() == ACLType.SHARED)
|
||||
{
|
||||
if (forceSharedACL)
|
||||
{
|
||||
log.warn("Forcing shared ACL on node: " + child.getId() + " ( "
|
||||
+ nodeDAO.getNodePair(child.getId()).getSecond() + ") - " + dbAcl);
|
||||
sharedAclToReplace = acl;
|
||||
propagateOnChildren = setFixAclPending(child.getId(), inheritFrom, mergeFrom, sharedAclToReplace,
|
||||
changes, false, asyncCall, propagateOnChildren, forceSharedACL);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ConcurrencyFailureException(
|
||||
"setFixedAcls: unexpected shared acl: " + dbAcl + " on node " + child.getId() + " ( "
|
||||
+ nodeDAO.getNodePair(child.getId()).getSecond() + ")");
|
||||
}
|
||||
throw new ConcurrencyFailureException("setFixedAcls: unexpected shared acl: "+dbAcl);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -551,7 +506,7 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
*
|
||||
*/
|
||||
private boolean setFixAclPending(Long nodeId, Long inheritFrom, Long mergeFrom, Long sharedAclToReplace,
|
||||
List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren, boolean forceSharedACL)
|
||||
List<AclChange> changes, boolean set, boolean asyncCall, boolean propagateOnChildren)
|
||||
{
|
||||
// check transaction time
|
||||
long transactionStartTime = AlfrescoTransactionSupport.getTransactionStartTime();
|
||||
@@ -559,7 +514,7 @@ public class ADMAccessControlListDAO implements AccessControlListDAO
|
||||
if (transactionTime < fixedAclMaxTransactionTime)
|
||||
{
|
||||
// make regular method call if time is under max transaction configured time
|
||||
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, asyncCall, propagateOnChildren, forceSharedACL);
|
||||
setFixedAcls(nodeId, inheritFrom, mergeFrom, sharedAclToReplace, changes, set, asyncCall, propagateOnChildren);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@@ -91,11 +91,6 @@ public interface AccessControlListDAO
|
||||
*/
|
||||
public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall);
|
||||
|
||||
/**
|
||||
* Set the inheritance on a given node and its children. If an unexpected ACL occurs in a child, it can be overridden by setting forceSharedACL
|
||||
*/
|
||||
public List<AclChange> setInheritanceForChildren(NodeRef parent, Long inheritFrom, Long sharedAclToReplace, boolean asyncCall, boolean forceSharedACL);
|
||||
|
||||
public Long getIndirectAcl(NodeRef nodeRef);
|
||||
|
||||
public Long getInheritedAcl(NodeRef nodeRef);
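The setFixedAcls javadoc above describes a time-boxed propagation: shared ACLs are pushed down the tree until the transaction exceeds fixedAclMaxTransactionTime, after which the remaining nodes are tagged with ASPECT_PENDING_FIX_ACL and finished later by FixedAclUpdater (whose diff follows). A generic sketch of that "work until the budget runs out, then mark the rest as pending" pattern is below; Node and the apply/mark steps are hypothetical stand-ins, not the repository implementation.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Generic sketch of time-boxed tree propagation with deferral of the remainder.
public class TimeBoxedPropagationSketch
{
    interface Node
    {
        long id();
        List<Node> children();
    }

    private final long maxMillis;                          // analogous to fixedAclMaxTransactionTime
    private final List<Long> pending = new ArrayList<>();  // nodes deferred to a later async job

    public TimeBoxedPropagationSketch(long maxMillis)
    {
        this.maxMillis = maxMillis;
    }

    public List<Long> propagate(Node root)
    {
        long start = System.currentTimeMillis();
        Deque<Node> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty())
        {
            Node node = queue.poll();
            if (System.currentTimeMillis() - start > maxMillis)
            {
                // Budget exceeded: stop recursing here and leave a marker (the role played by
                // ASPECT_PENDING_FIX_ACL) so an asynchronous job can finish the subtree later.
                pending.add(node.id());
                continue;
            }
            applySharedAcl(node);
            queue.addAll(node.children());
        }
        return pending;
    }

    private void applySharedAcl(Node node)
    {
        // Placeholder for the actual per-node ACL update.
    }
}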
|
||||
|
@@ -38,7 +38,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.batch.BatchProcessWorkProvider;
|
||||
import org.alfresco.repo.batch.BatchProcessor;
|
||||
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
|
||||
import org.alfresco.repo.domain.node.NodeDAO;
|
||||
import org.alfresco.repo.domain.node.NodeDAO.NodeRefQueryCallback;
|
||||
import org.alfresco.repo.lock.JobLockService;
|
||||
@@ -51,7 +50,6 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.security.permissions.PermissionServicePolicies;
|
||||
import org.alfresco.repo.security.permissions.PermissionServicePolicies.OnInheritPermissionsDisabled;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.repo.transaction.TransactionListenerAdapter;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -66,8 +64,6 @@ import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
|
||||
/**
|
||||
* Finds nodes with ASPECT_PENDING_FIX_ACL aspect and sets fixed ACLs for them
|
||||
@@ -95,7 +91,6 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
|
||||
private int maxItemBatchSize = 100;
|
||||
private int numThreads = 4;
|
||||
private boolean forceSharedACL = false;
|
||||
|
||||
private ClassPolicyDelegate<OnInheritPermissionsDisabled> onInheritPermissionsDisabledDelegate;
|
||||
private PolicyComponent policyComponent;
|
||||
@@ -137,11 +132,6 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
this.maxItemBatchSize = maxItemBatchSize;
|
||||
}
|
||||
|
||||
public void setForceSharedACL(boolean forceSharedACL)
|
||||
{
|
||||
this.forceSharedACL = forceSharedACL;
|
||||
}
|
||||
|
||||
public void setLockTimeToLive(long lockTimeToLive)
|
||||
{
|
||||
this.lockTimeToLive = lockTimeToLive;
|
||||
@@ -192,7 +182,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
public List<NodeRef> execute() throws Throwable
|
||||
{
|
||||
getNodesCallback.init();
|
||||
nodeDAO.getNodesWithAspects(aspects, getNodesCallback.getMinNodeId(), null, true, getNodesCallback);
|
||||
nodeDAO.getNodesWithAspects(aspects, getNodesCallback.getMinNodeId(), null, getNodesCallback);
|
||||
getNodesCallback.done();
|
||||
|
||||
return getNodesCallback.getNodes();
|
||||
@@ -263,7 +253,7 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
{
|
||||
}
|
||||
|
||||
public void process(final NodeRef nodeRef)
|
||||
public void process(final NodeRef nodeRef) throws Throwable
|
||||
{
|
||||
RunAsWork<Void> findAndUpdateAclRunAsWork = new RunAsWork<Void>()
|
||||
{
|
||||
@@ -275,44 +265,34 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
log.debug(String.format("Processing node %s", nodeRef));
|
||||
}
|
||||
|
||||
try
|
||||
final Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();
|
||||
|
||||
// MNT-22009 - If node was deleted and in archive store, remove the aspect and properties and do not
|
||||
// process
|
||||
if (nodeRef.getStoreRef().equals(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE))
|
||||
{
|
||||
final Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();
|
||||
|
||||
// MNT-22009 - If node was deleted and in archive store, remove the aspect and properties and do
|
||||
// not
|
||||
// process
|
||||
if (nodeRef.getStoreRef().equals(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE))
|
||||
{
|
||||
accessControlListDAO.removePendingAclAspect(nodeId);
|
||||
return null;
|
||||
}
|
||||
|
||||
// retrieve acl properties from node
|
||||
Long inheritFrom = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_INHERIT_FROM_ACL);
|
||||
Long sharedAclToReplace = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);
|
||||
|
||||
// set inheritance using retrieved prop
|
||||
accessControlListDAO.setInheritanceForChildren(nodeRef, inheritFrom, sharedAclToReplace, true,
|
||||
forceSharedACL);
|
||||
|
||||
// Remove aspect
|
||||
accessControlListDAO.removePendingAclAspect(nodeId);
|
||||
|
||||
if (!policyIgnoreUtil.ignorePolicy(nodeRef))
|
||||
{
|
||||
boolean transformedToAsyncOperation = toBoolean((Boolean) AlfrescoTransactionSupport
|
||||
.getResource(FixedAclUpdater.FIXED_ACL_ASYNC_REQUIRED_KEY));
|
||||
|
||||
OnInheritPermissionsDisabled onInheritPermissionsDisabledPolicy = onInheritPermissionsDisabledDelegate
|
||||
.get(ContentModel.TYPE_BASE);
|
||||
onInheritPermissionsDisabledPolicy.onInheritPermissionsDisabled(nodeRef, transformedToAsyncOperation);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
catch (Exception e)
|
||||
|
||||
// retrieve acl properties from node
|
||||
Long inheritFrom = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_INHERIT_FROM_ACL);
|
||||
Long sharedAclToReplace = (Long) nodeDAO.getNodeProperty(nodeId, ContentModel.PROP_SHARED_ACL_TO_REPLACE);
|
||||
|
||||
// set inheritance using retrieved prop
|
||||
accessControlListDAO.setInheritanceForChildren(nodeRef, inheritFrom, sharedAclToReplace, true);
|
||||
|
||||
// Remove aspect
|
||||
accessControlListDAO.removePendingAclAspect(nodeId);
|
||||
|
||||
if (!policyIgnoreUtil.ignorePolicy(nodeRef))
|
||||
{
|
||||
log.error("Job could not process pending ACL node " + nodeRef + ": " + e);
|
||||
e.printStackTrace();
|
||||
boolean transformedToAsyncOperation = toBoolean(
|
||||
(Boolean) AlfrescoTransactionSupport.getResource(FixedAclUpdater.FIXED_ACL_ASYNC_REQUIRED_KEY));
|
||||
|
||||
OnInheritPermissionsDisabled onInheritPermissionsDisabledPolicy = onInheritPermissionsDisabledDelegate
|
||||
.get(ContentModel.TYPE_BASE);
|
||||
onInheritPermissionsDisabledPolicy.onInheritPermissionsDisabled(nodeRef, transformedToAsyncOperation);
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled())
|
||||
@@ -328,7 +308,6 @@ public class FixedAclUpdater extends TransactionListenerAdapter implements Appli
|
||||
AuthenticationUtil.runAs(findAndUpdateAclRunAsWork, AuthenticationUtil.getSystemUserName());
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
private class GetNodesWithAspectCallback implements NodeRefQueryCallback
|
||||
{
|
||||
|
@@ -69,19 +69,19 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
public static final String PROPERTY_READ_ONLY = "system.delete_not_exists.read_only";
|
||||
public static final String PROPERTY_TIMEOUT_SECONDS = "system.delete_not_exists.timeout_seconds";
|
||||
|
||||
protected Connection connection;
|
||||
private Connection connection;
|
||||
private String sql;
|
||||
private int line;
|
||||
private File scriptFile;
|
||||
private Properties globalProperties;
|
||||
|
||||
protected boolean readOnly;
|
||||
protected int deleteBatchSize;
|
||||
protected int batchSize;
|
||||
private boolean readOnly;
|
||||
private int deleteBatchSize;
|
||||
private int batchSize;
|
||||
private long timeoutSec;
|
||||
|
||||
protected long deletedCount;
|
||||
protected Date startTime;
|
||||
private long deletedCount;
|
||||
private Date startTime;
|
||||
|
||||
public DeleteNotExistsExecutor(Connection connection, String sql, int line, File scriptFile, Properties globalProperties)
|
||||
{
|
||||
@@ -164,7 +164,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
|
||||
private void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
|
||||
{
|
||||
// The approach is to fetch ordered row ids from all referencer/secondary (e.g.
|
||||
// alf_audit_app, alf_audit_entry, alf_prop_unique_ctx) tables and
|
||||
@@ -190,7 +190,6 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
try
|
||||
{
|
||||
connection.setAutoCommit(false);
|
||||
|
||||
primaryPrepStmt = connection.prepareStatement(createPreparedSelectStatement(primaryTableName, primaryColumnName, primaryWhereClause));
|
||||
primaryPrepStmt.setFetchSize(batchSize);
|
||||
primaryPrepStmt.setLong(1, primaryId);
|
||||
@@ -265,7 +264,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean isTimeoutExceeded()
|
||||
private boolean isTimeoutExceeded()
|
||||
{
|
||||
if (timeoutSec <= 0)
|
||||
{
|
||||
@@ -276,7 +275,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return (now.getTime() > startTime.getTime() + (timeoutSec * 1000));
|
||||
}
|
||||
|
||||
protected Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName,
|
||||
private Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName,
|
||||
String primaryColumnName, Pair<String, String>[] tableColumn) throws SQLException
|
||||
{
|
||||
int rowsProcessed = 0;
|
||||
@@ -337,7 +336,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return primaryId;
|
||||
}
|
||||
|
||||
protected void deleteFromPrimaryTable(PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName) throws SQLException
|
||||
private void deleteFromPrimaryTable(PreparedStatement deletePrepStmt, Set<Long> deleteIds, String primaryTableName) throws SQLException
|
||||
{
|
||||
int deletedBatchCount = deleteIds.size();
|
||||
if (!readOnly && !deleteIds.isEmpty())
|
||||
@@ -426,7 +425,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return batchUpperLimit;
|
||||
}
|
||||
|
||||
protected boolean isLess(Long primaryId, Long[] secondaryIds)
|
||||
private boolean isLess(Long primaryId, Long[] secondaryIds)
|
||||
{
|
||||
for (Long secondaryId : secondaryIds)
|
||||
{
|
||||
@@ -448,8 +447,8 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
|
||||
return quotedString.replace("\"", "");
|
||||
}
|
||||
|
||||
protected String createPreparedSelectStatement(String tableName, String columnName, String whereClause)
|
||||
|
||||
private String createPreparedSelectStatement(String tableName, String columnName, String whereClause)
|
||||
{
|
||||
StringBuilder sqlBuilder = new StringBuilder("SELECT " + columnName + " FROM " + tableName + " WHERE ");
|
||||
|
||||
@@ -462,7 +461,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
|
||||
protected String createPreparedDeleteStatement(String tableName, String idColumnName, int deleteBatchSize, String whereClause)
|
||||
private String createPreparedDeleteStatement(String tableName, String idColumnName, int deleteBatchSize, String whereClause)
|
||||
{
|
||||
StringBuilder stmtBuilder = new StringBuilder("DELETE FROM " + tableName + " WHERE ");
|
||||
|
||||
@@ -516,7 +515,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected Long getColumnValueById(ResultSet resultSet, String columnId) throws SQLException
|
||||
private Long getColumnValueById(ResultSet resultSet, String columnId) throws SQLException
|
||||
{
|
||||
Long columnValue = null;
|
||||
if (resultSet != null && resultSet.next())
|
||||
@@ -527,7 +526,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return columnValue;
|
||||
}
|
||||
|
||||
protected ResultSet[] getSecondaryResultSets(PreparedStatement[] preparedStatements) throws SQLException
|
||||
private ResultSet[] getSecondaryResultSets(PreparedStatement[] preparedStatements) throws SQLException
|
||||
{
|
||||
ResultSet[] secondaryResultSets = new ResultSet[preparedStatements.length];
|
||||
for (int i = 1; i < preparedStatements.length; i++)
|
||||
@@ -541,7 +540,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
return secondaryResultSets;
|
||||
}
|
||||
|
||||
protected Long[] getSecondaryIds(ResultSet[] secondaryResultSets, Pair<String, String>[] tableColumn) throws SQLException
|
||||
private Long[] getSecondaryIds(ResultSet[] secondaryResultSets, Pair<String, String>[] tableColumn) throws SQLException
|
||||
{
|
||||
Long[] secondaryIds = new Long[tableColumn.length];
|
||||
|
||||
@@ -572,7 +571,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected void closeQuietly(Statement statement)
|
||||
private void closeQuietly(Statement statement)
|
||||
{
|
||||
if (statement != null)
|
||||
{
|
||||
@@ -587,7 +586,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected void closeQuietly(Statement[] statements)
|
||||
private void closeQuietly(Statement[] statements)
|
||||
{
|
||||
if (statements != null)
|
||||
{
|
||||
@@ -598,7 +597,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected void closeQuietly(ResultSet resultSet)
|
||||
private void closeQuietly(ResultSet resultSet)
|
||||
{
|
||||
if (resultSet != null)
|
||||
{
|
||||
@@ -613,7 +612,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
|
||||
}
|
||||
}
|
||||
|
||||
protected void closeQuietly(ResultSet[] resultSets)
|
||||
private void closeQuietly(ResultSet[] resultSets)
|
||||
{
|
||||
if (resultSets != null)
|
||||
{
|
||||
|
@@ -1,278 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.domain.schema.script;
|
||||
|
||||
import org.alfresco.util.Pair;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.io.File;
|
||||
import java.sql.Connection;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Extends <code>{@link DeleteNotExistsExecutor}</code> to cope with MySQL
|
||||
* specific fetch size limitation and restrictions.
|
||||
*/
|
||||
public class MySQLDeleteNotExistsExecutor extends DeleteNotExistsExecutor
|
||||
{
|
||||
private static final Log logger = LogFactory.getLog(MySQLDeleteNotExistsExecutor.class);
|
||||
|
||||
private final DataSource dataSource;
|
||||
|
||||
public MySQLDeleteNotExistsExecutor(Connection connection, String sql, int line, File scriptFile, Properties globalProperties, DataSource dataSource)
|
||||
{
|
||||
super(connection, sql, line, scriptFile, globalProperties);
|
||||
this.dataSource = dataSource;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process(Pair<String, String>[] tableColumn, Long[] tableUpperLimits, String[] optionalWhereClauses) throws SQLException
|
||||
{
|
||||
// The approach is to fetch ordered row ids from all referencer/secondary (e.g.
// alf_audit_app, alf_audit_entry, alf_prop_unique_ctx) tables and
// referenced/primary table (e.g. alf_prop_root) concurrently, so that it is
// possible to skip over id gaps efficiently while at the same time being able to
// work out which ids are obsolete and delete them in batches.

// The algorithm can be further improved by iterating over the rows in descending order.
// This is due to the fact that older data should be more stable over time.

String primaryTableName = tableColumn[0].getFirst();
|
||||
String primaryColumnName = tableColumn[0].getSecond();
|
||||
String primaryWhereClause = optionalWhereClauses[0];
|
||||
|
||||
Long primaryId = 0L;
|
||||
|
||||
PreparedStatement primaryPrepStmt = null;
|
||||
PreparedStatement[] secondaryPrepStmts = null;
|
||||
PreparedStatement deletePrepStmt = null;
|
||||
Set<Long> deleteIds = new HashSet<>();
|
||||
|
||||
deletedCount = 0L;
|
||||
startTime = new Date();
|
||||
|
||||
long defaultOffset = 0L;
|
||||
try
|
||||
{
|
||||
connection.setAutoCommit(false);
|
||||
|
||||
primaryPrepStmt = connection.prepareStatement(createLimitPreparedSelectStatement(primaryTableName, primaryColumnName, primaryWhereClause));
|
||||
primaryPrepStmt.setLong(1, primaryId);
|
||||
primaryPrepStmt.setLong(2, tableUpperLimits[0]);
|
||||
primaryPrepStmt.setInt(3, batchSize);
|
||||
primaryPrepStmt.setLong(4, defaultOffset);
|
||||
|
||||
boolean hasResults = primaryPrepStmt.execute();
|
||||
|
||||
if (hasResults)
|
||||
{
|
||||
secondaryPrepStmts = new PreparedStatement[tableColumn.length];
|
||||
for (int i = 1; i < tableColumn.length; i++)
|
||||
{
|
||||
PreparedStatement secStmt = connection.prepareStatement(createLimitPreparedSelectStatement(tableColumn[i].getFirst(), tableColumn[i].getSecond(), optionalWhereClauses[i]));
|
||||
secStmt.setLong(1, primaryId);
|
||||
secStmt.setLong(2, tableUpperLimits[i]);
|
||||
secStmt.setInt(3, batchSize);
|
||||
secStmt.setLong(4, defaultOffset);
|
||||
|
||||
secondaryPrepStmts[i] = secStmt;
|
||||
}
|
||||
|
||||
deletePrepStmt = connection.prepareStatement(createPreparedDeleteStatement(primaryTableName, primaryColumnName, deleteBatchSize, primaryWhereClause));
|
||||
|
||||
// Timeout is only checked at each batch start.
// It could be refined further by checking it for each primary row processed.
while (hasResults && !isTimeoutExceeded())
|
||||
{
|
||||
// Process batch
|
||||
primaryId = processPrimaryTableResultSet(primaryPrepStmt, secondaryPrepStmts, deletePrepStmt, deleteIds, primaryTableName, primaryColumnName, tableColumn);
|
||||
connection.commit();
|
||||
|
||||
if (primaryId == null)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
// Prepare for next batch
|
||||
primaryPrepStmt.setLong(1, primaryId);
|
||||
primaryPrepStmt.setLong(2, tableUpperLimits[0]);
|
||||
primaryPrepStmt.setInt(3, batchSize);
|
||||
primaryPrepStmt.setLong(4, defaultOffset);
|
||||
|
||||
for (int i = 1; i < tableColumn.length; i++)
|
||||
{
|
||||
PreparedStatement secStmt = secondaryPrepStmts[i];
|
||||
secStmt.setLong(1, primaryId);
|
||||
secStmt.setLong(2, tableUpperLimits[i]);
|
||||
secStmt.setInt(3, batchSize);
|
||||
secStmt.setLong(4, defaultOffset);
|
||||
}
|
||||
|
||||
hasResults = primaryPrepStmt.execute();
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we have any more ids to delete
|
||||
if (!deleteIds.isEmpty())
|
||||
{
|
||||
deleteFromPrimaryTable(deletePrepStmt, deleteIds, primaryTableName);
|
||||
connection.commit();
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
String msg = ((readOnly) ? "Script would have" : "Script") + " deleted a total of " + deletedCount + " items from table " + primaryTableName + ".";
|
||||
logger.debug(msg);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
closeQuietly(deletePrepStmt);
|
||||
closeQuietly(secondaryPrepStmts);
|
||||
closeQuietly(primaryPrepStmt);
|
||||
|
||||
connection.setAutoCommit(true);
|
||||
}
|
||||
}
|
||||
|
||||
protected Long processPrimaryTableResultSet(PreparedStatement primaryPrepStmt, PreparedStatement[] secondaryPrepStmts, PreparedStatement deletePrepStmt, Set<Long> deleteIds,
|
||||
String primaryTableName, String primaryColumnName, Pair<String, String>[] tableColumn) throws SQLException
|
||||
{
|
||||
int rowsProcessed = 0;
|
||||
Long primaryId = null;
|
||||
ResultSet[] secondaryResultSets = null;
|
||||
try (ResultSet resultSet = primaryPrepStmt.getResultSet())
|
||||
{
|
||||
secondaryResultSets = getSecondaryResultSets(secondaryPrepStmts);
|
||||
Long[] secondaryIds = getSecondaryIds(secondaryResultSets, tableColumn);
|
||||
|
||||
// Create and populate secondary tables offsets
|
||||
Long[] secondaryOffsets = new Long[tableColumn.length];
|
||||
for (int i = 1; i < tableColumn.length; i++)
|
||||
{
|
||||
secondaryOffsets[i] = 0L;
|
||||
}
|
||||
|
||||
while (resultSet.next())
|
||||
{
|
||||
++rowsProcessed;
|
||||
primaryId = resultSet.getLong(primaryColumnName);
|
||||
|
||||
while (isLess(primaryId, secondaryIds))
|
||||
{
|
||||
deleteIds.add(primaryId);
|
||||
|
||||
if (deleteIds.size() == deleteBatchSize)
|
||||
{
|
||||
deleteFromPrimaryTable(deletePrepStmt, deleteIds, primaryTableName);
|
||||
connection.commit();
|
||||
}
|
||||
|
||||
if (!resultSet.next())
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
++rowsProcessed;
|
||||
primaryId = resultSet.getLong(primaryColumnName);
|
||||
}
|
||||
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("RowsProcessed " + rowsProcessed + " from primary table " + primaryTableName);
|
||||
}
|
||||
|
||||
updateSecondaryIds(primaryId, secondaryIds, secondaryPrepStmts, secondaryOffsets, secondaryResultSets, tableColumn);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
closeQuietly(secondaryResultSets);
|
||||
}
|
||||
|
||||
return primaryId;
|
||||
}
|
||||
|
||||
private void updateSecondaryIds(Long primaryId, Long[] secondaryIds, PreparedStatement[] secondaryPrepStmts, Long[] secondaryOffsets, ResultSet[] secondaryResultSets,
|
||||
Pair<String, String>[] tableColumn) throws SQLException
|
||||
{
|
||||
for (int i = 1; i < tableColumn.length; i++)
|
||||
{
|
||||
Long secondaryId = secondaryIds[i];
|
||||
while (secondaryId != null && primaryId >= secondaryId)
|
||||
{
|
||||
ResultSet resultSet = secondaryResultSets[i];
|
||||
String columnId = tableColumn[i].getSecond();
|
||||
|
||||
secondaryId = getColumnValueById(resultSet, columnId);
|
||||
|
||||
// Check if we reach the end of the first page
|
||||
if (secondaryId == null)
|
||||
{
|
||||
// Close the previous result set
|
||||
closeQuietly(resultSet);
|
||||
|
||||
// Set to use the next page
|
||||
long offset = secondaryOffsets[i] + batchSize;
|
||||
secondaryOffsets[i] = offset;
|
||||
|
||||
PreparedStatement secStmt = secondaryPrepStmts[i];
|
||||
secStmt.setLong(4, offset);
|
||||
|
||||
// Check if any results were found
|
||||
boolean secHasResults = secStmt.execute();
|
||||
secondaryResultSets[i] = secHasResults ? secStmt.getResultSet() : null;
|
||||
|
||||
// Try again to get the next secondary id
|
||||
secondaryId = getColumnValueById(secondaryResultSets[i], columnId);
|
||||
}
|
||||
|
||||
secondaryIds[i] = secondaryId;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String createLimitPreparedSelectStatement(String tableName, String columnName, String whereClause)
|
||||
{
|
||||
StringBuilder sqlBuilder = new StringBuilder("SELECT " + columnName + " FROM " + tableName + " WHERE ");
|
||||
|
||||
if (whereClause != null && !whereClause.isEmpty())
|
||||
{
|
||||
sqlBuilder.append(whereClause + " AND ");
|
||||
}
|
||||
|
||||
sqlBuilder.append(columnName + " > ? AND " + columnName + " <= ? ORDER BY " + columnName + " ASC LIMIT ? OFFSET ?");
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
}
|
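The deleted MySQLDeleteNotExistsExecutor above walks the primary table and every secondary table in ascending id order and deletes primary ids that no secondary cursor can still reference. The standalone sketch below shows that ordered-merge idea over plain iterators; it is an assumed simplification with no JDBC, batching or paging.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Sketch of the ordered-id merge behind the DELETE_NOT_EXISTS executors: a primary id is
// obsolete when every secondary cursor has moved past it without landing on it, i.e. no
// secondary table references it any more. Iterators stand in for JDBC result sets.
public class DeleteNotExistsMergeSketch
{
    public static List<Long> findObsolete(Iterator<Long> primary, List<Iterator<Long>> secondaries)
    {
        List<Long> obsolete = new ArrayList<>();
        List<Long> cursors = new ArrayList<>();
        for (Iterator<Long> s : secondaries)
        {
            cursors.add(s.hasNext() ? s.next() : null);
        }
        while (primary.hasNext())
        {
            long primaryId = primary.next();
            // Advance every secondary cursor past ids below the current primary id.
            for (int i = 0; i < cursors.size(); i++)
            {
                while (cursors.get(i) != null && cursors.get(i) < primaryId)
                {
                    cursors.set(i, secondaries.get(i).hasNext() ? secondaries.get(i).next() : null);
                }
            }
            // If no secondary currently sits on this id, nothing references it.
            boolean referenced = cursors.stream().anyMatch(c -> c != null && c == primaryId);
            if (!referenced)
            {
                obsolete.add(primaryId);
            }
        }
        return obsolete;
    }
}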
@@ -350,7 +350,7 @@ public class ScriptExecutorImpl implements ScriptExecutor
|
||||
}
|
||||
else if (sql.startsWith("--DELETE_NOT_EXISTS"))
|
||||
{
|
||||
DeleteNotExistsExecutor deleteNotExists = createDeleteNotExistsExecutor(dialect, connection, sql, line, scriptFile);
|
||||
DeleteNotExistsExecutor deleteNotExists = new DeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties);
|
||||
deleteNotExists.execute();
|
||||
|
||||
// Reset
|
||||
@@ -537,17 +537,7 @@ public class ScriptExecutorImpl implements ScriptExecutor
|
||||
try { scriptInputStream.close(); } catch (Throwable e) {}
|
||||
}
|
||||
}
|
||||
|
||||
private DeleteNotExistsExecutor createDeleteNotExistsExecutor(Dialect dialect, Connection connection, String sql, int line, File scriptFile)
|
||||
{
|
||||
if (dialect instanceof MySQLInnoDBDialect)
|
||||
{
|
||||
return new MySQLDeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties, dataSource);
|
||||
}
|
||||
|
||||
return new DeleteNotExistsExecutor(connection, sql, line, scriptFile, globalProperties);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Execute the given SQL statement, absorbing exceptions that we expect during
|
||||
* schema creation or upgrade.
|
||||
|
@@ -416,13 +416,11 @@ public class EventConsolidator implements EventSupportedPolicies
|
||||
}
|
||||
// Get before values that changed
|
||||
Map<K, V> beforeDelta = new HashMap<>(before);
|
||||
Map<K, V> afterDelta = new HashMap<>(after);
|
||||
|
||||
beforeDelta.entrySet().removeAll(after.entrySet());
|
||||
|
||||
// Add nulls for before properties
|
||||
Set<K> beforeKeys = before.keySet();
|
||||
Set<K> newKeys = afterDelta.keySet();
|
||||
Set<K> newKeys = after.keySet();
|
||||
newKeys.removeAll(beforeKeys);
|
||||
|
||||
for (K key : newKeys)
|
||||
|
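The EventConsolidator hunk above derives the changed-property deltas by copying the maps and removing unchanged entries; the two variants of the newKeys line differ in whether the keys come from the defensive copy (afterDelta) or from the live map (after), whose keySet() view would itself be mutated by removeAll. A small self-contained sketch of the same delta computation follows; the property names are made up for illustration.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Sketch of a before/after property delta. Working on copies keeps removeAll from
// mutating the original maps, since Map.keySet() and entrySet() are live views.
public class PropertyDeltaSketch
{
    public static void main(String[] args)
    {
        Map<String, Object> before = new HashMap<>(Map.of("cm:title", "old", "cm:author", "admin"));
        Map<String, Object> after = new HashMap<>(Map.of("cm:title", "new", "cm:author", "admin", "cm:description", "added"));

        Map<String, Object> beforeDelta = new HashMap<>(before);
        Map<String, Object> afterDelta = new HashMap<>(after);

        // Drop entries that are identical on both sides.
        beforeDelta.entrySet().removeAll(after.entrySet());
        afterDelta.entrySet().removeAll(before.entrySet());

        // Keys that only exist after the change get an explicit null "before" value.
        Set<String> newKeys = new HashSet<>(afterDelta.keySet());
        newKeys.removeAll(before.keySet());
        for (String key : newKeys)
        {
            beforeDelta.put(key, null);
        }

        System.out.println("before delta: " + beforeDelta); // {cm:title=old, cm:description=null}
        System.out.println("after delta:  " + afterDelta);  // {cm:title=new, cm:description=added}
    }
}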
@@ -54,6 +54,7 @@ import org.alfresco.repo.policy.JavaBehaviour;
|
||||
import org.alfresco.repo.policy.PolicyComponent;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||
import org.alfresco.service.cmr.repository.AssociationRef;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
@@ -89,11 +90,11 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
|
||||
protected DictionaryService dictionaryService;
|
||||
private DescriptorService descriptorService;
|
||||
private EventFilterRegistry eventFilterRegistry;
|
||||
private Event2MessageProducer event2MessageProducer;
|
||||
private TransactionService transactionService;
|
||||
private PersonService personService;
|
||||
protected NodeResourceHelper nodeResourceHelper;
|
||||
|
||||
private EventGeneratorQueue eventGeneratorQueue;
|
||||
private NodeTypeFilter nodeTypeFilter;
|
||||
private ChildAssociationTypeFilter childAssociationTypeFilter;
|
||||
private EventUserFilter userFilter;
|
||||
@@ -108,10 +109,10 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
|
||||
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
|
||||
PropertyCheck.mandatory(this, "descriptorService", descriptorService);
|
||||
PropertyCheck.mandatory(this, "eventFilterRegistry", eventFilterRegistry);
|
||||
PropertyCheck.mandatory(this, "event2MessageProducer", event2MessageProducer);
|
||||
PropertyCheck.mandatory(this, "transactionService", transactionService);
|
||||
PropertyCheck.mandatory(this, "personService", personService);
|
||||
PropertyCheck.mandatory(this, "nodeResourceHelper", nodeResourceHelper);
|
||||
PropertyCheck.mandatory(this, "eventGeneratorQueue", eventGeneratorQueue);
|
||||
|
||||
this.nodeTypeFilter = eventFilterRegistry.getNodeTypeFilter();
|
||||
this.childAssociationTypeFilter = eventFilterRegistry.getChildAssociationTypeFilter();
|
||||
@@ -176,6 +177,12 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
|
||||
this.eventFilterRegistry = eventFilterRegistry;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void setEvent2MessageProducer(Event2MessageProducer event2MessageProducer)
|
||||
{
|
||||
this.event2MessageProducer = event2MessageProducer;
|
||||
}
|
||||
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
@@ -191,11 +198,6 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
this.nodeResourceHelper = nodeResourceHelper;
}

public void setEventGeneratorQueue(EventGeneratorQueue eventGeneratorQueue)
{
this.eventGeneratorQueue = eventGeneratorQueue;
}

@Override
public void onCreateNode(ChildAssociationRef childAssocRef)
{
@@ -426,26 +428,20 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin

protected void sendEvent(NodeRef nodeRef, EventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(nodeRef, consolidator, eventInfo));
}

private RepoEvent<?> createEvent(NodeRef nodeRef, EventConsolidator consolidator, EventInfo eventInfo)
{
String user = eventInfo.getPrincipal();

if (consolidator.isTemporaryNode())
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Ignoring temporary node: " + nodeRef);
}
return null;
return;
}

final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest node info into account
final RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));


final QName nodeType = consolidator.getNodeType();
if (isFiltered(nodeType, user))
@@ -456,7 +452,7 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((nodeType == null) ? "Unknown' " : nodeType.toPrefixString())
+ "' created by: " + user);
}
return null;
return;
}

if (event.getType().equals(EventType.NODE_UPDATED.getType()) && consolidator.isResourceBeforeAllFieldsNull())
@@ -465,34 +461,27 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
{
LOGGER.trace("Ignoring node updated event as no fields have been updated: " + nodeRef);
}
return null;
return;
}

logEvent(event, consolidator.getEventTypes());
return event;
logAndSendEvent(event, consolidator.getEventTypes());
}

protected void sendEvent(ChildAssociationRef childAssociationRef, ChildAssociationEventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(eventInfo, childAssociationRef, consolidator));
}

private RepoEvent<?> createEvent(EventInfo eventInfo, ChildAssociationRef childAssociationRef, ChildAssociationEventConsolidator consolidator)
{
String user = eventInfo.getPrincipal();
if (consolidator.isTemporaryChildAssociation())
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Ignoring temporary child association: " + childAssociationRef);
}
return null;
return;
}

final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest association info into account
final RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));

final QName childAssocType = consolidator.getChildAssocType();
if (isFilteredChildAssociation(childAssocType, user))
@@ -503,7 +492,7 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((childAssocType == null) ? "Unknown' " : childAssocType.toPrefixString())
+ "' created by: " + user);
}
return null;
return;
} else if (childAssociationRef.isPrimary())
{
if (LOGGER.isTraceEnabled())
@@ -512,20 +501,13 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
+ ((childAssocType == null) ? "Unknown' " : childAssocType.toPrefixString())
+ "' created by: " + user);
}
return null;
return;
}

logEvent(event, consolidator.getEventTypes());
return event;
logAndSendEvent(event, consolidator.getEventTypes());
}

protected void sendEvent(AssociationRef peerAssociationRef, PeerAssociationEventConsolidator consolidator)
{
EventInfo eventInfo = getEventInfo(AuthenticationUtil.getFullyAuthenticatedUser());
eventGeneratorQueue.accept(()-> createEvent(eventInfo, peerAssociationRef, consolidator));
}

private RepoEvent<?> createEvent(EventInfo eventInfo, AssociationRef peerAssociationRef, PeerAssociationEventConsolidator consolidator)
{
if (consolidator.isTemporaryPeerAssociation())
{
@@ -533,21 +515,30 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
{
LOGGER.trace("Ignoring temporary peer association: " + peerAssociationRef);
}
return null;
return;
}

RepoEvent<?> event = consolidator.getRepoEvent(eventInfo);
logEvent(event, consolidator.getEventTypes());
return event;
final String user = AuthenticationUtil.getFullyAuthenticatedUser();
// Get the repo event before the filtering,
// so we can take the latest association info into account
final RepoEvent<?> event = consolidator.getRepoEvent(getEventInfo(user));

logAndSendEvent(event, consolidator.getEventTypes());
}

private void logEvent(RepoEvent<?> event, Deque<EventType> listOfEvents)
protected void logAndSendEvent(RepoEvent<?> event, Deque<EventType> listOfEvents)
{
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("List of Events:" + listOfEvents);
LOGGER.trace("Sending event:" + event);
}
// Need to execute this in another read txn because Camel expects it
transactionService.getRetryingTransactionHelper().doInTransaction((RetryingTransactionCallback<Void>) () -> {
event2MessageProducer.send(event);

return null;
}, true, false);
}
}

@@ -1,179 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.event2;
|
||||
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.alfresco.repo.event.v1.model.RepoEvent;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
|
||||
/*
|
||||
* This queue allows to create asynchronously the RepoEvent offloading the work to a ThreadPool but
|
||||
* at the same time it preserves the order of the events
|
||||
*/
|
||||
public class EventGeneratorQueue implements InitializingBean
|
||||
{
|
||||
protected static final Log LOGGER = LogFactory.getLog(EventGeneratorQueue.class);
|
||||
|
||||
protected Executor enqueueThreadPoolExecutor;
|
||||
protected Executor dequeueThreadPoolExecutor;
|
||||
protected Event2MessageProducer event2MessageProducer;
|
||||
protected BlockingQueue<EventInMaking> queue = new LinkedBlockingQueue<>();
|
||||
protected Runnable listener = createListener();
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() throws Exception
|
||||
{
|
||||
PropertyCheck.mandatory(this, "enqueueThreadPoolExecutor", enqueueThreadPoolExecutor);
|
||||
PropertyCheck.mandatory(this, "dequeueThreadPoolExecutor", dequeueThreadPoolExecutor);
|
||||
PropertyCheck.mandatory(this, "event2MessageProducer", event2MessageProducer);
|
||||
}
|
||||
|
||||
public void setEvent2MessageProducer(Event2MessageProducer event2MessageProducer)
|
||||
{
|
||||
this.event2MessageProducer = event2MessageProducer;
|
||||
}
|
||||
|
||||
public void setEnqueueThreadPoolExecutor(Executor enqueueThreadPoolExecutor)
|
||||
{
|
||||
this.enqueueThreadPoolExecutor = enqueueThreadPoolExecutor;
|
||||
}
|
||||
|
||||
public void setDequeueThreadPoolExecutor(Executor dequeueThreadPoolExecutor)
|
||||
{
|
||||
this.dequeueThreadPoolExecutor = dequeueThreadPoolExecutor;
|
||||
dequeueThreadPoolExecutor.execute(listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Procedure to enqueue the callback functions that creates an event.
|
||||
* @param maker Callback function that creates an event.
|
||||
*/
|
||||
public void accept(Callable<RepoEvent<?>> maker)
|
||||
{
|
||||
EventInMaking eventInMaking = new EventInMaking(maker);
|
||||
queue.offer(eventInMaking);
|
||||
enqueueThreadPoolExecutor.execute(() -> {
|
||||
try
|
||||
{
|
||||
eventInMaking.make();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
LOGGER.error("Unexpected error while enqueuing maker function for repository event" + e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create listener task in charge of dequeuing and sending events ready to be sent.
|
||||
* @return The task in charge of dequeuing and sending events ready to be sent.
|
||||
*/
|
||||
private Runnable createListener()
|
||||
{
|
||||
return new Runnable()
|
||||
{
|
||||
@Override
|
||||
public void run()
|
||||
{
|
||||
try
|
||||
{
|
||||
while (!Thread.interrupted())
|
||||
{
|
||||
try
|
||||
{
|
||||
EventInMaking eventInMaking = queue.take();
|
||||
RepoEvent<?> event = eventInMaking.getEventWhenReady();
|
||||
if (event != null)
|
||||
{
|
||||
event2MessageProducer.send(event);
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
LOGGER.error("Unexpected error while dequeuing and sending repository event" + e);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
LOGGER.warn("Unexpected: rescheduling the listener thread.");
|
||||
dequeueThreadPoolExecutor.execute(listener);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* Simple class that makes events and allows to retrieve them when ready
|
||||
*/
|
||||
private static class EventInMaking
|
||||
{
|
||||
private Callable<RepoEvent<?>> maker;
|
||||
private volatile RepoEvent<?> event;
|
||||
private CountDownLatch latch;
|
||||
|
||||
public EventInMaking(Callable<RepoEvent<?>> maker)
|
||||
{
|
||||
this.maker = maker;
|
||||
this.latch = new CountDownLatch(1);
|
||||
}
|
||||
|
||||
public void make() throws Exception
|
||||
{
|
||||
try
|
||||
{
|
||||
event = maker.call();
|
||||
}
|
||||
finally
|
||||
{
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
public RepoEvent<?> getEventWhenReady() throws InterruptedException
|
||||
{
|
||||
latch.await(30, TimeUnit.SECONDS);
|
||||
return event;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
return maker.toString();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -1,28 +1,28 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.forms.processor.node;
|
||||
|
||||
import static org.alfresco.repo.forms.processor.node.FormFieldConstants.ASSOC_DATA_ADDED_SUFFIX;
|
||||
@@ -634,12 +634,9 @@ public abstract class ContentModelFormProcessor<ItemType, PersistType> extends
{
try
{
if (!fileInfo.getName().equals(fieldData.getValue()))
{
// if the name property changes, the rename method of the file folder
// service should be called rather than updating the property directly
this.fileFolderService.rename(nodeRef, (String) fieldData.getValue());
}
// if the name property changes the rename method of the file folder
// service should be called rather than updating the property directly
this.fileFolderService.rename(nodeRef, (String) fieldData.getValue());
}
catch (FileExistsException fee)
{
@@ -33,7 +33,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.function.Predicate;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -153,16 +152,6 @@ public class SafeApplicationEventMulticaster implements ApplicationEventMulticas
}
}

@Override
public void removeApplicationListeners(Predicate<ApplicationListener<?>> predicate)
{
}

@Override
public void removeApplicationListenerBeans(Predicate<String> predicate)
{
}

public void removeAllListeners()
{
synchronized (this.defaultRetriever)
@@ -0,0 +1,538 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.repo.rendition.executer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.io.StringWriter;
|
||||
import java.io.Writer;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.xml.transform.OutputKeys;
|
||||
import javax.xml.transform.TransformerConfigurationException;
|
||||
import javax.xml.transform.sax.SAXTransformerFactory;
|
||||
import javax.xml.transform.sax.TransformerHandler;
|
||||
import javax.xml.transform.stream.StreamResult;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.rendition.RenditionLocation;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.rendition.RenditionServiceException;
|
||||
import org.alfresco.service.cmr.repository.ContentReader;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.tika.config.TikaConfig;
|
||||
import org.apache.tika.exception.TikaException;
|
||||
import org.apache.tika.metadata.Metadata;
|
||||
import org.apache.tika.mime.MediaType;
|
||||
import org.apache.tika.parser.AutoDetectParser;
|
||||
import org.apache.tika.parser.ParseContext;
|
||||
import org.apache.tika.parser.Parser;
|
||||
import org.apache.tika.sax.BodyContentHandler;
|
||||
import org.apache.tika.sax.ContentHandlerDecorator;
|
||||
import org.xml.sax.Attributes;
|
||||
import org.xml.sax.ContentHandler;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.xml.sax.helpers.AttributesImpl;
|
||||
|
||||
/**
|
||||
* This class provides a way to turn documents supported by the
|
||||
* {@link ContentService} standard transformers into basic, clean
|
||||
* HTML.
|
||||
* <P/>
|
||||
* The HTML that is produced probably isn't going to be suitable
|
||||
* for direct web publishing, as it's likely going to be too
|
||||
* basic. Instead, it should be simple and clean HTML, suitable
|
||||
* for being the basis of some web-friendly HTML once edited
|
||||
* / further transformed.
|
||||
*
|
||||
* @author Nick Burch
|
||||
* @since 3.4
|
||||
*
|
||||
* @deprecated The RenditionService is being replace by the simpler async RenditionService2.
|
||||
*/
|
||||
@Deprecated
|
||||
public class HTMLRenderingEngine extends AbstractRenderingEngine
|
||||
{
|
||||
private static Log logger = LogFactory.getLog(HTMLRenderingEngine.class);
|
||||
private TikaConfig tikaConfig;
|
||||
|
||||
/**
|
||||
* This optional parameter, when set to true, causes only the
|
||||
* contents of the HTML body to be written out as the rendition.
|
||||
* By default, the whole of the HTML document is used.
|
||||
*/
|
||||
public static final String PARAM_BODY_CONTENTS_ONLY = "bodyContentsOnly";
|
||||
/**
|
||||
* This optional parameter, when set to true, causes any embedded
|
||||
* images to be written into the same folder as the html, with
|
||||
* a name prefix.
|
||||
* By default, images are placed into a sub-folder.
|
||||
*/
|
||||
public static final String PARAM_IMAGES_SAME_FOLDER = "imagesSameFolder";
|
||||
|
||||
/*
|
||||
* Action constants
|
||||
*/
|
||||
public static final String NAME = "htmlRenderingEngine";
|
||||
|
||||
|
||||
@Override
|
||||
protected Collection<ParameterDefinition> getParameterDefinitions() {
|
||||
Collection<ParameterDefinition> paramList = super.getParameterDefinitions();
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_BODY_CONTENTS_ONLY, DataTypeDefinition.BOOLEAN, false,
|
||||
getParamDisplayLabel(PARAM_BODY_CONTENTS_ONLY)));
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_IMAGES_SAME_FOLDER, DataTypeDefinition.BOOLEAN, false,
|
||||
getParamDisplayLabel(PARAM_IMAGES_SAME_FOLDER)));
|
||||
return paramList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects the TikaConfig to use
|
||||
*
|
||||
* @param tikaConfig The Tika Config to use
|
||||
*/
|
||||
public void setTikaConfig(TikaConfig tikaConfig)
|
||||
{
|
||||
this.tikaConfig = tikaConfig;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.alfresco.repo.rendition.executer.AbstractRenderingEngine#render(org.alfresco.repo.rendition.executer.AbstractRenderingEngine.RenderingContext)
|
||||
*/
|
||||
@Override
|
||||
protected void render(RenderingContext context)
|
||||
{
|
||||
ContentReader contentReader = context.makeContentReader();
|
||||
String sourceMimeType = contentReader.getMimetype();
|
||||
|
||||
// Check that Tika supports the supplied file
|
||||
AutoDetectParser p = new AutoDetectParser(tikaConfig);
|
||||
MediaType sourceMediaType = MediaType.parse(sourceMimeType);
|
||||
if(! p.getParsers().containsKey(sourceMediaType))
|
||||
{
|
||||
throw new RenditionServiceException(
|
||||
"Source mime type of " + sourceMimeType +
|
||||
" is not supported by Tika for HTML conversions"
|
||||
);
|
||||
}
|
||||
|
||||
// Make the HTML Version using Tika
|
||||
// This will also extract out any images as found
|
||||
generateHTML(p, context);
|
||||
}
|
||||
|
||||
private String getHtmlBaseName(RenderingContext context)
|
||||
{
|
||||
// Based on the name of the source node, which will
|
||||
// also largely be the name of the html node
|
||||
String baseName = nodeService.getProperty(
|
||||
context.getSourceNode(),
|
||||
ContentModel.PROP_NAME
|
||||
).toString();
|
||||
if(baseName.lastIndexOf('.') > -1)
|
||||
{
|
||||
baseName = baseName.substring(0, baseName.lastIndexOf('.'));
|
||||
}
|
||||
return baseName;
|
||||
}
|
||||
/**
|
||||
* What name should be used for the images directory?
|
||||
* Note this is only required if {@link #PARAM_IMAGES_SAME_FOLDER} is false (the default).
|
||||
*/
|
||||
private String getImagesDirectoryName(RenderingContext context)
|
||||
{
|
||||
// Based on the name of the source node, which will
|
||||
// also largely be the name of the html node
|
||||
String folderName = getHtmlBaseName(context);
|
||||
folderName = folderName + "_files";
|
||||
return folderName;
|
||||
}
|
||||
/**
|
||||
* What prefix should be applied to the name of images?
|
||||
*/
|
||||
private String getImagesPrefixName(RenderingContext context)
|
||||
{
|
||||
if( context.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false) )
|
||||
{
|
||||
// Prefix with the name of the source node
|
||||
return getHtmlBaseName(context) + "_";
|
||||
}
|
||||
else {
|
||||
// They have their own folder, so no prefix is needed
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a directory to store the images in.
|
||||
* The directory will be a sibling of the rendered
|
||||
* HTML, and named similar to it.
|
||||
* Note this is only required if {@link #PARAM_IMAGES_SAME_FOLDER} is false (the default).
|
||||
*/
|
||||
private NodeRef createImagesDirectory(RenderingContext context)
|
||||
{
|
||||
// It should be a sibling of the HTML in it's eventual location
|
||||
// (not it's current temporary one!)
|
||||
RenditionLocation location = resolveRenditionLocation(
|
||||
context.getSourceNode(), context.getDefinition(), context.getDestinationNode()
|
||||
);
|
||||
NodeRef parent = location.getParentRef();
|
||||
|
||||
// Figure out what to call it, based on the HTML node
|
||||
String folderName = getImagesDirectoryName(context);
|
||||
|
||||
// It is already there?
|
||||
// (eg from when the rendition is being re-run)
|
||||
NodeRef imgFolder = nodeService.getChildByName(
|
||||
parent, ContentModel.ASSOC_CONTAINS, folderName
|
||||
);
|
||||
if(imgFolder != null)
|
||||
return imgFolder;
|
||||
|
||||
// Create the directory
|
||||
Map<QName,Serializable> properties = new HashMap<QName,Serializable>();
|
||||
properties.put(ContentModel.PROP_NAME, folderName);
|
||||
imgFolder = nodeService.createNode(
|
||||
parent,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(folderName),
|
||||
ContentModel.TYPE_FOLDER,
|
||||
properties
|
||||
).getChildRef();
|
||||
|
||||
return imgFolder;
|
||||
}
|
||||
|
||||
private NodeRef createEmbeddedImage(NodeRef imgFolder, boolean primary,
|
||||
String filename, String contentType, InputStream imageSource,
|
||||
RenderingContext context)
|
||||
{
|
||||
// Create the node if needed
|
||||
NodeRef img = nodeService.getChildByName(
|
||||
imgFolder, ContentModel.ASSOC_CONTAINS, filename
|
||||
);
|
||||
if(img == null)
|
||||
{
|
||||
Map<QName,Serializable> properties = new HashMap<QName,Serializable>();
|
||||
properties.put(ContentModel.PROP_NAME, filename);
|
||||
img = nodeService.createNode(
|
||||
imgFolder,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(filename),
|
||||
ContentModel.TYPE_CONTENT,
|
||||
properties
|
||||
).getChildRef();
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Image node created: " + img);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO Once composite content is properly supported,
|
||||
// at this point we'll associate the new image with
|
||||
// the rendered HTML node so the dependency is tracked.
|
||||
|
||||
// Put the image into the node
|
||||
ContentWriter writer = contentService.getWriter(
|
||||
img, ContentModel.PROP_CONTENT, true
|
||||
);
|
||||
writer.setMimetype(contentType);
|
||||
writer.putContent(imageSource);
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Image content written into " + img);
|
||||
}
|
||||
|
||||
// All done
|
||||
return img;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a Tika-compatible SAX content handler, which will
|
||||
* be used to generate+capture the XHTML
|
||||
*/
|
||||
private ContentHandler buildContentHandler(Writer output, RenderingContext context)
|
||||
{
|
||||
// Create the main transformer
|
||||
SAXTransformerFactory factory = (SAXTransformerFactory)
|
||||
SAXTransformerFactory.newInstance();
|
||||
TransformerHandler handler;
|
||||
|
||||
try {
|
||||
handler = factory.newTransformerHandler();
|
||||
} catch (TransformerConfigurationException e) {
|
||||
throw new RenditionServiceException("SAX Processing isn't available - " + e);
|
||||
}
|
||||
|
||||
handler.getTransformer().setOutputProperty(OutputKeys.INDENT, "yes");
|
||||
handler.setResult(new StreamResult(output));
|
||||
handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml");
|
||||
|
||||
// Change the image links as they go past
|
||||
String dirName = null, imgPrefix = null;
|
||||
if(context.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false))
|
||||
{
|
||||
imgPrefix = getImagesPrefixName(context);
|
||||
}
|
||||
else
|
||||
{
|
||||
dirName = getImagesDirectoryName(context);
|
||||
}
|
||||
ContentHandler contentHandler = new TikaImageRewritingContentHandler(
|
||||
handler, dirName, imgPrefix
|
||||
);
|
||||
|
||||
// If required, wrap it to only return the body
|
||||
boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
|
||||
if(bodyOnly) {
|
||||
contentHandler = new BodyContentHandler(contentHandler);
|
||||
}
|
||||
|
||||
// All done
|
||||
return contentHandler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks Tika to translate the contents into HTML
|
||||
*/
|
||||
private void generateHTML(Parser p, RenderingContext context)
|
||||
{
|
||||
ContentReader contentReader = context.makeContentReader();
|
||||
|
||||
// Setup things to parse with
|
||||
StringWriter sw = new StringWriter();
|
||||
ContentHandler handler = buildContentHandler(sw, context);
|
||||
|
||||
// Tell Tika what we're dealing with
|
||||
Metadata metadata = new Metadata();
|
||||
metadata.set(
|
||||
Metadata.CONTENT_TYPE,
|
||||
contentReader.getMimetype()
|
||||
);
|
||||
metadata.set(
|
||||
Metadata.RESOURCE_NAME_KEY,
|
||||
nodeService.getProperty(
|
||||
context.getSourceNode(),
|
||||
ContentModel.PROP_NAME
|
||||
).toString()
|
||||
);
|
||||
|
||||
// Our parse context needs to extract images
|
||||
ParseContext parseContext = new ParseContext();
|
||||
parseContext.set(Parser.class, new TikaImageExtractingParser(context));
|
||||
|
||||
// Parse
|
||||
try {
|
||||
p.parse(
|
||||
contentReader.getContentInputStream(),
|
||||
handler, metadata, parseContext
|
||||
);
|
||||
} catch(Exception e) {
|
||||
throw new RenditionServiceException("Tika HTML Conversion Failed", e);
|
||||
}
|
||||
|
||||
// As a string
|
||||
String html = sw.toString();
|
||||
|
||||
// If we're doing body-only, remove all the html namespaces
|
||||
// that will otherwise clutter up the document
|
||||
boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
|
||||
if(bodyOnly) {
|
||||
html = html.replaceAll("<\\?xml.*?\\?>", "");
|
||||
html = html.replaceAll("<p xmlns=\"http://www.w3.org/1999/xhtml\"","<p");
|
||||
html = html.replaceAll("<h(\\d) xmlns=\"http://www.w3.org/1999/xhtml\"","<h\\1");
|
||||
html = html.replaceAll("<div xmlns=\"http://www.w3.org/1999/xhtml\"","<div");
|
||||
html = html.replaceAll("<table xmlns=\"http://www.w3.org/1999/xhtml\"","<table");
|
||||
html = html.replaceAll(" ","");
|
||||
}
|
||||
|
||||
// Save it
|
||||
ContentWriter contentWriter = context.makeContentWriter();
|
||||
contentWriter.setMimetype("text/html");
|
||||
contentWriter.putContent( html );
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A nested Tika parser which extracts out any
|
||||
* images as they come past.
|
||||
*/
|
||||
@SuppressWarnings("serial")
|
||||
private class TikaImageExtractingParser implements Parser {
|
||||
private Set<MediaType> types;
|
||||
|
||||
private RenderingContext renderingContext;
|
||||
private NodeRef imgFolder = null;
|
||||
private int count = 0;
|
||||
|
||||
private TikaImageExtractingParser(RenderingContext renderingContext) {
|
||||
this.renderingContext = renderingContext;
|
||||
|
||||
// Our expected types
|
||||
types = new HashSet<MediaType>();
|
||||
types.add(MediaType.image("bmp"));
|
||||
types.add(MediaType.image("gif"));
|
||||
types.add(MediaType.image("jpg"));
|
||||
types.add(MediaType.image("jpeg"));
|
||||
types.add(MediaType.image("png"));
|
||||
types.add(MediaType.image("tiff"));
|
||||
|
||||
// Are images going in the same place as the HTML?
|
||||
if( renderingContext.getParamWithDefault(PARAM_IMAGES_SAME_FOLDER, false) )
|
||||
{
|
||||
RenditionLocation location = resolveRenditionLocation(
|
||||
renderingContext.getSourceNode(), renderingContext.getDefinition(),
|
||||
renderingContext.getDestinationNode()
|
||||
);
|
||||
imgFolder = location.getParentRef();
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Using imgFolder: " + imgFolder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<MediaType> getSupportedTypes(ParseContext context) {
|
||||
return types;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void parse(InputStream stream, ContentHandler handler,
|
||||
Metadata metadata, ParseContext context) throws IOException,
|
||||
SAXException, TikaException {
|
||||
// Is it a supported image?
|
||||
String filename = metadata.get(Metadata.RESOURCE_NAME_KEY);
|
||||
String type = metadata.get(Metadata.CONTENT_TYPE);
|
||||
boolean accept = false;
|
||||
|
||||
if(type != null) {
|
||||
for(MediaType mt : types) {
|
||||
if(mt.toString().equals(type)) {
|
||||
accept = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if(filename != null) {
|
||||
for(MediaType mt : types) {
|
||||
String ext = "." + mt.getSubtype();
|
||||
if(filename.endsWith(ext)) {
|
||||
accept = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!accept)
|
||||
return;
|
||||
|
||||
handleImage(stream, filename, type);
|
||||
}
|
||||
|
||||
private void handleImage(InputStream stream, String filename, String type) {
|
||||
count++;
|
||||
|
||||
// Do we already have the folder? If not, create it
|
||||
if(imgFolder == null) {
|
||||
imgFolder = createImagesDirectory(renderingContext);
|
||||
}
|
||||
|
||||
// Give it a sensible name if needed
|
||||
if(filename == null) {
|
||||
filename = "image-" + count + ".";
|
||||
filename += type.substring(type.indexOf('/')+1);
|
||||
}
|
||||
|
||||
// Prefix the filename if needed
|
||||
filename = getImagesPrefixName(renderingContext) + filename;
|
||||
|
||||
// Save the image
|
||||
createEmbeddedImage(imgFolder, (count==1), filename, type, stream, renderingContext);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A content handler that re-writes image src attributes,
|
||||
* and passes everything else on to the real one.
|
||||
*/
|
||||
private class TikaImageRewritingContentHandler extends ContentHandlerDecorator {
|
||||
private String imageFolder;
|
||||
private String imagePrefix;
|
||||
|
||||
private TikaImageRewritingContentHandler(ContentHandler handler, String imageFolder, String imagePrefix) {
|
||||
super(handler);
|
||||
this.imageFolder = imageFolder;
|
||||
this.imagePrefix = imagePrefix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startElement(String uri, String localName, String qName,
|
||||
Attributes origAttrs) throws SAXException {
|
||||
// If we have an image tag, re-write the src attribute
|
||||
// if required
|
||||
if("img".equals(localName)) {
|
||||
AttributesImpl attrs;
|
||||
if(origAttrs instanceof AttributesImpl) {
|
||||
attrs = (AttributesImpl)origAttrs;
|
||||
} else {
|
||||
attrs = new AttributesImpl(origAttrs);
|
||||
}
|
||||
|
||||
for(int i=0; i<attrs.getLength(); i++) {
|
||||
if("src".equals(attrs.getLocalName(i))) {
|
||||
String src = attrs.getValue(i);
|
||||
if(src.startsWith("embedded:")) {
|
||||
String newSrc = "";
|
||||
if(imageFolder != null)
|
||||
newSrc += imageFolder + "/";
|
||||
if(imagePrefix != null)
|
||||
newSrc += imagePrefix;
|
||||
newSrc += src.substring(src.indexOf(':')+1);
|
||||
attrs.setValue(i, newSrc);
|
||||
}
|
||||
}
|
||||
}
|
||||
super.startElement(uri, localName, qName, attrs);
|
||||
} else {
|
||||
// For any other tag, pass through as-is
|
||||
super.startElement(uri, localName, qName, origAttrs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -52,7 +52,6 @@ import java.util.Set;
import java.util.StringJoiner;

import static org.alfresco.repo.content.MimetypeMap.MIMETYPE_PDF;
import static org.alfresco.repo.content.transform.magick.ImageTransformationOptions.OPT_COMMAND_OPTIONS;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALLOW_ENLARGEMENT;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALLOW_PDF_ENLARGEMENT;
import static org.alfresco.repo.rendition2.RenditionDefinition2.ALPHA_REMOVE;
@@ -123,7 +122,6 @@ public class TransformationOptionsConverter implements InitializingBean
IMAGE_OPTIONS.addAll(RESIZE_OPTIONS);
IMAGE_OPTIONS.add(AUTO_ORIENT);
IMAGE_OPTIONS.add(ALPHA_REMOVE);
IMAGE_OPTIONS.add(OPT_COMMAND_OPTIONS);
}

private static Set<String> PDF_OPTIONS = new HashSet<>(Arrays.asList(new String[]
@@ -286,8 +284,6 @@ public class TransformationOptionsConverter implements InitializingBean
}
opts.setSourceOptionsList(sourceOptionsList);
}

ifSet(options, OPT_COMMAND_OPTIONS, (v) -> opts.setCommandOptions(v));
}
}
else
@@ -365,11 +361,13 @@ public class TransformationOptionsConverter implements InitializingBean
{
ImageTransformationOptions opts = (ImageTransformationOptions) options;

// From a security viewpoint it would be better not to support the option of passing anything to
// ImageMagick. It might be possible to extract some of the well know values and add them to the
// T-Engine engine_config.
// TODO We don't support this any more for security reasons, however it might be possible to
// extract some of the well know values and add them to the newer ImageMagick transform options.
String commandOptions = opts.getCommandOptions();
ifSet(commandOptions != null && !commandOptions.isBlank(), map, OPT_COMMAND_OPTIONS, commandOptions);
if (commandOptions != null && !commandOptions.isBlank())
{
logger.error("ImageMagick commandOptions are no longer supported for security reasons: " + commandOptions);
}

ImageResizeOptions imageResizeOptions = opts.getResizeOptions();
if (imageResizeOptions != null)
@@ -26,9 +26,6 @@
package org.alfresco.repo.search;

import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.http.HttpStatus;

import java.util.List;

/**
* @author Andy
@@ -36,10 +33,11 @@ import java.util.List;
*/
public class QueryParserException extends AlfrescoRuntimeException
{
/** Serial version UUID. */

/**
*
*/
private static final long serialVersionUID = 4886993838297301968L;
/** Http Status Code that should be returned by Remote API. */
private int httpStatusCode;

/**
* @param msgId
@@ -47,6 +45,7 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId)
{
super(msgId);
// TODO Auto-generated constructor stub
}

/**
@@ -56,6 +55,7 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Object[] msgParams)
{
super(msgId, msgParams);
// TODO Auto-generated constructor stub
}

/**
@@ -65,6 +65,7 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Throwable cause)
{
super(msgId, cause);
// TODO Auto-generated constructor stub
}

/**
@@ -75,22 +76,7 @@ public class QueryParserException extends AlfrescoRuntimeException
public QueryParserException(String msgId, Object[] msgParams, Throwable cause)
{
super(msgId, msgParams, cause);
// TODO Auto-generated constructor stub
}

/**
* Constructor for exception that allows setting an HTTP status code.
*
* @param msgId Message for the exception
* @param httpStatusCode Status code to return for exception
*/
public QueryParserException(String msgId, int httpStatusCode)
{
super(msgId);
this.httpStatusCode = httpStatusCode;
}

public int getHttpStatusCode()
{
return httpStatusCode;
}
}
@@ -1,57 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.search;
|
||||
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.GenericFacetResponse;
|
||||
import org.alfresco.repo.search.impl.solr.facet.facetsresponse.Metric;
|
||||
import org.alfresco.service.cmr.search.ResultSet;
|
||||
import org.alfresco.util.Pair;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Supertype layer interface for all resultset coming from a search engine (e.g. Elasticsearch, Solr)
|
||||
* This interface has been originally extracted from the Apache Solr ResultSet implementation,
|
||||
* that's the reason why the naming used for denoting some things (e.g. facets) is tied to the Solr world.
|
||||
*/
|
||||
public interface SearchEngineResultSet extends ResultSet, SearchEngineResultMetadata
|
||||
{
|
||||
Map<String, List<Pair<String, Integer>>> getFieldFacets();
|
||||
|
||||
Map<String, List<Pair<String, Integer>>> getFacetIntervals();
|
||||
|
||||
Map<String, List<Map<String, String>>> getFacetRanges();
|
||||
|
||||
List<GenericFacetResponse> getPivotFacets();
|
||||
|
||||
Map<String, Set<Metric>> getStats();
|
||||
|
||||
long getLastIndexedTxId();
|
||||
|
||||
boolean getProcessedDenies();
|
||||
}
|
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -23,29 +23,16 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search;
package org.alfresco.repo.search.impl;

/**
* Additional metadata ops available for {@link org.alfresco.service.cmr.search.ResultSet} coming from a search engine.
* Json returned from Solr
*
* @author Gethin James
* @since 5.0
* @see SearchEngineResultSet
*/
public interface SearchEngineResultMetadata
public interface JSONResult
{
/**
* Returns the query execution time, or put in other words, the amount of
* time the search engine spent for processing the request.
*
* @return the query execution time
*/
Long getQueryTime();

/**
* Total number of items matching a the current query execution.
*
* @return the number of items in the search index that matched a query execution.
*/
long getNumberFound();
public Long getQueryTime();
public long getNumberFound();
}
@@ -1,28 +1,28 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.search.impl.querymodel.impl.db;

import java.util.ArrayList;
@@ -74,10 +74,6 @@ public class DBQuery extends BaseQuery implements DBQueryBuilderComponent

Set<String> selectorGroup;

private int limit = 0;

private int offset = 0;

/**
* @param source Source
* @param constraint Constraint
@@ -137,22 +133,6 @@ public class DBQuery extends BaseQuery implements DBQueryBuilderComponent
this.sinceTxId = sinceTxId;
}

public int getLimit() {
return limit;
}

public void setLimit(int limit) {
this.limit = limit;
}

public int getOffset() {
return offset;
}

public void setOffset(int offset) {
this.offset = offset;
}

public List<DBQueryBuilderJoinCommand> getJoins()
{
HashMap<QName, DBQueryBuilderJoinCommand> singleJoins = new HashMap<QName, DBQueryBuilderJoinCommand>();
Some files were not shown because too many files have changed in this diff.