Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-09-10 14:11:58 +00:00)

Compare commits (164 commits)
Commit SHAs compared:

f15c4d7a8f 85a3c71849 4db8ca16e1 52e71719d1 507161a1d0 559171a32c b9f449df57 3185ecf6cb
bd7f2a4250 79efa12b10 5807e756bd 6ecb019b84 c26f933c44 43e528878e b120a9658f 3875a84f74
8dddf293d0 d01e9ffbbc 6f8507ebe2 923261d9b9 078e461b71 ddde92ef21 189011d528 af849d2144
53208f207c 4cf3f77824 9d24e083bb 6345c0a825 cda3d11c7e 7ad35ac60d a164917232 507c6b2ed8
ab0d65897c d92844cebb 25986d77d6 2b8948b84b 64bf49cc22 3ce95c5262 e854a01988 65ba8b16e0
d2bb3a980d 59b641474f 9ee56762fb 6cb8e84ee4 d540bb319b 30a2bff92a ac08612183 031d1c740c
aac73e2064 ec72000380 015e3213af d97510dfba df98ad9e92 19a7239612 2af2e7b868 84824edffa
646419c073 ea854b55a4 4bfb26a660 1f608b1eb5 8652c9d644 57e8ee2c76 6cb00d8e3b bb58c4a2d9
1766ce6e31 0c5498fba0 48045c8c38 89adfacca6 6159bee521 a8a06f4b1f 279ff3a0d8 b6a50ae6e1
83097d452b b26d03cc0c af24fef02b 85fa4b5a93 01b5fb5593 09f8d7a806 77691ec5fd 472710ffd4
16f0714e99 c7c40b06e1 439a9254a3 a9f19de31c d237e51bb2 d7758d5509 f8b40d3cc0 6d48c0979e
1b7bada294 924f64409e 7267ea5846 c86d82e9cd f7e6320a83 cff733bbdf 57942cd43b 4aaae012c5
1c387add9d d3ab782b57 b3d43b0500 a8e1ce909e 76e815f9c5 5e973504d6 b84e1f8100 42d1176ccf
113c27d130 e7cec99d07 aa6c347d74 3f8f05b0d4 2c6a7e3af6 692ff3ffa8 6b4df496ce 6ce7d45aa2
d5210ab058 b7df7a80c1 b2d3c6e8c6 e2965a57f5 f395275129 ed3cc88b71 d41f3dcfde 6bc03c02fb
b68e805a37 68f34c284a 3dcd47fac4 11354630f7 6eb7538bde 2da894d82b b996987426 8b6dfa47b8
456e442438 b779613910 3530e3dc3b c4dcbba707 5b539b4248 6f1e35217b 5d3f3f866e ad97dcd6f4
597056b1c4 4be9aad5f6 b066520e0e dcf564d3ba 254d29d45f e0751568db fa696cfec5 49bc40bdea
4f0b16b881 33155c8d92 838d758983 eee7e00310 d726ab8099 216f60a0ec 8e973c2b60 d9c0016591
2deb07b97b 3cf1cfd133 a1c0344828 b639c6ba62 e95100e429 9626f5ace6 48af7ebe9e f635bd9754
3cac5ebfc5 28eaa8c2f8 d6e096a06c c71e536245
.travis.yml (24 changed lines)

@@ -55,7 +55,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - AppContext02TestSuite"

@@ -70,7 +70,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - AppContext04TestSuite"

@@ -78,7 +78,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - AppContext05TestSuite"

@@ -97,7 +97,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext06TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - AppContextExtraTestSuite"

@@ -105,7 +105,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - MiscContextTestSuite"

@@ -113,7 +113,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl repository -Dtest=MiscContextTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - SearchTestSuite"

@@ -145,7 +145,7 @@ jobs:
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.name=alfresco -Ddb.url=jdbc:mariadb://localhost:3307/alfresco?useUnicode=yes\&characterEncoding=UTF-8 -Ddb.username=alfresco -Ddb.password=alfresco -Ddb.driver=org.mariadb.jdbc.Driver

  - name: "Repository - MariaDB 10.6 tests"
-   if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
+   if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
    before_script:
      - docker run -d -p 3307:3306 --name mariadb -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mariadb:10.6 --transaction-isolation=READ-COMMITTED --max-connections=300 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1

@@ -159,7 +159,7 @@ jobs:
    script: travis_wait 20 mvn -B test -pl repository -Dtest=AllDBTestsTestSuite -Ddb.driver=com.mysql.jdbc.Driver -Ddb.name=alfresco -Ddb.url=jdbc:mysql://localhost:3307/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Repository - MySQL 8 tests"
-   if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/
+   if: (branch =~ /(release\/.*$|master)/ AND commit_message !~ /\[skip db\]/ AND type != pull_request) OR commit_message =~ /\[db\]/ OR commit_message =~ /\[latest db\]/
    before_script:
      - docker run -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=alfresco -e MYSQL_USER=alfresco -e MYSQL_DATABASE=alfresco -e MYSQL_PASSWORD=alfresco mysql:8 --transaction-isolation='READ-COMMITTED'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1

@@ -210,7 +210,7 @@ jobs:

  - name: "Repository - PostgreSQL 13.3 tests"
    # We only run DB tests on the latest version of PostgreSQL on feature branches
-   if: commit_message !~ /\[skip db\]/
+   if: commit_message !~ /\[skip db\]/ OR commit_message =~ /\[latest db\]/
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1

@@ -234,7 +234,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Remote-api - AppContext03TestSuite"

@@ -242,7 +242,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Remote-api - AppContext04TestSuite"

@@ -250,7 +250,7 @@ jobs:
    before_script:
      - docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
      - docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
-     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.2
+     - docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.3
    script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco

  - name: "Remote-api - AppContextExtraTestSuite"
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-community-repo-amps</artifactId>
-       <version>14.1</version>
+       <version>14.3</version>
    </parent>

    <modules>
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-governance-services-community-parent</artifactId>
-       <version>14.1</version>
+       <version>14.3</version>
    </parent>

    <modules>
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-governance-services-automation-community-repo</artifactId>
-       <version>14.1</version>
+       <version>14.3</version>
    </parent>
@@ -55,6 +55,7 @@ import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanCo
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_AS_OF;
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_NAME;
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_AUTHORITY;
+import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS;
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS_ELIGIBLE;
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_INSTRUCTIONS;
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_PERIOD;

@@ -248,6 +249,9 @@ public class RecordProperties extends TestModel
     @JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS_ELIGIBLE)
     private Boolean recordSearchDispositionEventsEligible;

+    @JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS)
+    private List<String> recordSearchDispositionEvents;
+
     @JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_INSTRUCTIONS)
     private String recordSearchDispositionInstructions;
@@ -47,6 +47,7 @@ import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanCo
 import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_VITAL_RECORD_INDICATOR;

 import java.util.Date;
+import java.util.List;

 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;

@@ -134,7 +135,7 @@ public class RecordCategoryChildProperties extends TestModel
     private String recordSearchDispositionInstructions;

     @JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS)
-    private Boolean recordSearchDispositionEvents;
+    private List<String> recordSearchDispositionEvents;

     @JsonProperty (PROPERTIES_OWNER)
     private Owner owner;
@@ -0,0 +1,103 @@
/*
 * #%L
 * Alfresco Records Management Module
 * %%
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 * -
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * -
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 * -
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.rest.v0;

import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import org.alfresco.rest.core.v0.BaseAPI;
import org.apache.http.HttpResponse;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.stereotype.Component;

/**
 * Methods to make API requests using v0 API for Exporting Items
 *
 * @author Shubham Jain
 * @since 7.1.0
 */
@Component
public class ExportAPI extends BaseAPI
{
    /**
     * The URI to export an item
     */
    private static final String EXPORT_API = "{0}rma/admin/export";

    /**
     * Export a single Record/Record Folder/Record Category using V0 Export API
     *
     * @param user               User performing the export
     * @param password           User's Password
     * @param expectedStatusCode Expected Response Code
     * @param nodeID             ID of the Node(Record/RecordFolder) to be exported
     * @return HTTP Response
     */
    public HttpResponse exportRMNode(String user, String password, int expectedStatusCode, String nodeID)
    {
        return export(user, password, expectedStatusCode, Collections.singletonList(getNodeRefSpacesStore() + nodeID));
    }

    /**
     * Export a list of nodes using V0 Export API
     *
     * @param user               User performing the export
     * @param password           User's Password
     * @param expectedStatusCode Expected Response Code
     * @param nodeIDList         List of the nodes to be exported
     * @return HTTP Response
     */
    public HttpResponse exportRMNodes(String user, String password, int expectedStatusCode, List<String> nodeIDList)
    {
        List<String> nodeRefs =
                nodeIDList.stream().map(nodeID -> getNodeRefSpacesStore() + nodeID).collect(Collectors.toList());

        return export(user, password, expectedStatusCode, nodeRefs);
    }

    /**
     * Export API function to perform Export Operation on items with given noderefs using V0 Export Rest API
     *
     * @param user               User performing the export
     * @param password           User's Password
     * @param expectedStatusCode Expected Response Code
     * @param nodeRefs           list of the noderefs for the items to be exported
     * @return Rest API Post Request
     */
    public HttpResponse export(String user, String password, int expectedStatusCode, List<String> nodeRefs)
    {
        final JSONObject requestParams = new JSONObject();

        requestParams.put("nodeRefs", new JSONArray(nodeRefs));

        return doPostJsonRequest(user, password, expectedStatusCode, requestParams, EXPORT_API);
    }
}
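For orientation, here is a minimal sketch of how a test could drive the new ExportAPI helper; it mirrors what ExportRecordsTests further down in this change set does, and assumes a Spring-wired BaseRMRestTest subclass where getAdminUser() and SC_OK (HTTP 200) are available:

    // Hedged example, not part of the diff: export a single record folder and expect HTTP 200.
    @Autowired
    private ExportAPI exportAPI;

    public void exportFolderExample(String recordFolderId)
    {
        exportAPI.exportRMNode(getAdminUser().getUsername(), getAdminUser().getPassword(), SC_OK, recordFolderId);
    }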
@@ -67,7 +67,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;

+import static org.apache.commons.httpclient.HttpStatus.SC_INTERNAL_SERVER_ERROR;
 /**
  * API tests to check actions on frozen content
  *

@@ -309,11 +309,11 @@ public class PreventActionsOnFrozenContentTests extends BaseRMRestTest

         STEP("Execute the retain action");
         rmRolesAndActionsAPI.executeAction(getAdminUser().getUsername(), getAdminUser().getPassword(), record.getName(),
-                RM_ACTIONS.END_RETENTION);
+                RM_ACTIONS.END_RETENTION, null, SC_INTERNAL_SERVER_ERROR);

         STEP("Check the record search disposition properties");
         Record recordUpdated = getRestAPIFactory().getRecordsAPI().getRecord(record.getId());
-        assertTrue(recordUpdated.getProperties().getRecordSearchDispositionActionName().contains(RM_ACTIONS.DESTROY.getAction()));
+        assertTrue(recordUpdated.getProperties().getRecordSearchDispositionActionName().contains(RM_ACTIONS.END_RETENTION.getAction()));
         assertTrue(recordUpdated.getProperties().getRecordSearchDispositionPeriod().contains("immediately"));
     }
@@ -0,0 +1,268 @@
/*-
 * #%L
 * Alfresco Records Management Module
 * %%
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 * -
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * -
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 * -
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.rest.rm.community.recordcategories;

import static org.alfresco.rest.rm.community.model.recordcategory.RetentionPeriodProperty.CREATED_DATE;
import static org.alfresco.rest.rm.community.model.recordcategory.RetentionPeriodProperty.CUT_OFF_DATE;
import static org.alfresco.rest.rm.community.model.recordcategory.RetentionPeriodProperty.DATE_FILED;
import static org.alfresco.rest.rm.community.utils.CoreUtil.createBodyForMoveCopy;
import static org.alfresco.rest.rm.community.utils.CoreUtil.toContentModel;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.OK;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

import org.alfresco.rest.core.v0.BaseAPI.RM_ACTIONS;
import org.alfresco.rest.core.v0.RMEvents;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.v0.service.DispositionScheduleService;
import org.alfresco.test.AlfrescoTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

/**
 * Tests for moving record categories between record categories with different retention schedule
 */
public class MoveRecCategoriesWithRSTests extends BaseRMRestTest
{
    private RecordCategory rootCategory, rootCategory2;
    private Record elRecord, nonElRecord;
    @Autowired
    private DispositionScheduleService dispositionScheduleService;

    /**
     * Create two root categories with some retention schedules on record level
     */
    @BeforeMethod
    private void setUpMoveRecCategoriesWithRSTests()
    {
        STEP("Create record category with retention schedule and apply it to records.");
        rootCategory = createRootCategory(getRandomName("rootCategory1"));
        dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), true);

        STEP("Create record category with retention schedule and apply it to records.");
        rootCategory2 = createRootCategory(getRandomName("rootCategory2"));
        dispositionScheduleService.createCategoryRetentionSchedule(rootCategory2.getName(), true);
    }

    /**
     * Given following structure is created:
     * rootCategory1 with RS applied on record level with cut off and destroy after 1 day
     * - subCategory1 without RS
     * - recFolder
     * - incomplete electronic record
     * - complete non-electronic record
     * rootCategory2 with RS with retain and destroy both after 2 day
     * When moving subcategory1 within rootCategory2
     * Then the records will inherit the RS from rootCategory2
     */
    @Test
    @AlfrescoTest (jira = "APPS-1005")
    public void testInheritWhenMoveToDifferentRSStep() throws Exception
    {
        STEP("Add retention schedule cut off step after 1 day period.");
        dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "day|1", CREATED_DATE);

        STEP("Add retention schedule destroy step after 1 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory.getName(), "day|1", CUT_OFF_DATE);

        STEP("Create a subcategory with a record folder and records.");
        RecordCategoryChild subCategory = createSubCategoryWithRecords();

        STEP("Add retention schedule retain step after 2 day period.");
        dispositionScheduleService.addRetainAfterPeriodStep(rootCategory2.getName(), "day|2");

        STEP("Add retention schedule destroy step after 2 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory2.getName(), "day|2", DATE_FILED);

        STEP("Move the subcategory within the rootCategory2.");
        getRestAPIFactory().getNodeAPI(toContentModel(subCategory.getId())).move(createBodyForMoveCopy(rootCategory2.getId()));
        assertStatusCode(OK);

        STEP("Check that both records inherit rootCategory2 retention schedule");
        elRecord = getRestAPIFactory().getRecordsAPI().getRecord(elRecord.getId());
        nonElRecord = getRestAPIFactory().getRecordsAPI().getRecord(nonElRecord.getId());
        assertTrue(elRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.END_RETENTION.getAction()),
                "Disposition action should be retain");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("day"),
                "Disposition period property should be day");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("2"),
                "Disposition period expression should be 2");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.END_RETENTION.getAction()),
                "Disposition action should be retain");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("day"),
                "Disposition period property should be day");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("2"),
                "Disposition period expression should be 2");
    }

    /**
     * Given following structure is created:
     * rootCategory1 with RS applied on record level with retain and destroy after 1 day
     * - subCategory without RS
     * - recFolder
     * - incomplete electronic record
     * - complete non-electronic record
     * rootCategory2 with RS with cut off on event case closed and destroy both after 2 day
     * When moving subcategory within rootCategory2
     * Then the records will inherit the RS from rootCategory2
     */
    @Test
    @AlfrescoTest (jira = "APPS-1004")
    public void testInheritWhenMoveToDifferentRSStepOnEventBase() throws Exception
    {
        STEP("Add retention schedule retain step after 1 day period.");
        dispositionScheduleService.addRetainAfterPeriodStep(rootCategory.getName(), "day|1");

        STEP("Add retention schedule destroy step after 1 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory.getName(), "day|1", CUT_OFF_DATE);

        STEP("Create a subcategory with a record folder and records.");
        RecordCategoryChild subCategory = createSubCategoryWithRecords();

        STEP("Add retention schedule cut off step on event case closed.");
        dispositionScheduleService.addCutOffAfterEventStep(rootCategory2.getName(), RMEvents.CASE_CLOSED.getEventName());

        STEP("Add retention schedule destroy step after 1 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory2.getName(), "day|2", DATE_FILED);

        STEP("Move the subcategory within the rootCategory2.");
        getRestAPIFactory().getNodeAPI(toContentModel(subCategory.getId())).move(createBodyForMoveCopy(rootCategory2.getId()));
        assertStatusCode(OK);

        STEP("Check that both records inherit rootCategory2 retention schedule");
        elRecord = getRestAPIFactory().getRecordsAPI().getRecord(elRecord.getId());
        nonElRecord = getRestAPIFactory().getRecordsAPI().getRecord(nonElRecord.getId());
        assertTrue(elRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.CUT_OFF.getAction()),
                "Disposition action should be cut off");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("none"),
                "Disposition period property should none");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("0"),
                "Disposition period expression should be 0");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.CASE_CLOSED.getEventName()),
                "Disposition event list doesn't contain case closed event");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.CUT_OFF.getAction()),
                "Disposition action should be cut off");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("none"),
                "Disposition period property should be none");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("0"),
                "Disposition period expression should be 0");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.CASE_CLOSED.getEventName()),
                "Disposition event list doesn't contain case closed event");
    }

    /**
     * Given following structure is created:
     * rootCategory1 with RS applied on record level with cut off on event case closed and destroy after 1 day
     * - subCategory2 without RS
     * - recFolder
     * - incomplete electronic record
     * - complete non-electronic record
     * rootCategory2 with cut off on event Obsolete and destroy both after 2 day
     * When moving subcategory2 within rootCategory2
     * Then the records will inherit the RS from rootCategory2
     */
    @Test
    @AlfrescoTest (jira = "APPS-1004")
    public void testInheritWhenMoveToSameStepDifferentEvent() throws Exception
    {
        STEP("Add retention schedule cut off on case closed.");
        dispositionScheduleService.addCutOffAfterEventStep(rootCategory.getName(), RMEvents.CASE_CLOSED.getEventName());

        STEP("Add retention schedule destroy step after 1 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory.getName(), "day|1", CUT_OFF_DATE);

        STEP("Create a subcategory with a record folder and records.");
        RecordCategoryChild subCategory = createSubCategoryWithRecords();

        STEP("Add retention schedule cut off step on event separation.");
        dispositionScheduleService.addCutOffAfterEventStep(rootCategory2.getName(), RMEvents.OBSOLETE.getEventName());

        STEP("Add retention schedule destroy step after 2 Day period.");
        dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(rootCategory2.getName(), "day|2", DATE_FILED);

        STEP("Move the subcategory within the rootCategory2.");
        getRestAPIFactory().getNodeAPI(toContentModel(subCategory.getId())).move(createBodyForMoveCopy(rootCategory2.getId()));
        assertStatusCode(OK);

        STEP("Check that both records inherit rootCategory2 retention schedule");
        elRecord = getRestAPIFactory().getRecordsAPI().getRecord(elRecord.getId());
        nonElRecord = getRestAPIFactory().getRecordsAPI().getRecord(nonElRecord.getId());
        assertTrue(elRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.CUT_OFF.getAction()),
                "Disposition action should be cut off");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("none"),
                "Disposition period property should be none");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("0"),
                "Disposition period expression should be 0");
        assertFalse(elRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.CASE_CLOSED.getEventName()),
                "Event list contain the event from the previous RS ");
        assertTrue(elRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.OBSOLETE.getEventName()),
                "Event list doesn't contain the event from the current RS ");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionActionName().equalsIgnoreCase(RM_ACTIONS.CUT_OFF.getAction()),
                "Disposition action should be cut off");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriod().equalsIgnoreCase("none"),
                "Disposition period property should be none");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionPeriodExpression().equalsIgnoreCase("0"),
                "Disposition period expression should be 0");
        assertFalse(nonElRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.CASE_CLOSED.getEventName()),
                "Event list contain the event from the previous RS ");
        assertTrue(nonElRecord.getProperties().getRecordSearchDispositionEvents().contains(RMEvents.OBSOLETE.getEventName()),
                "Event list doesn't contain the event from the current RS ");
    }

    @AfterMethod (alwaysRun = true)
    public void cleanupMoveRecCategoriesWithRSTests()
    {
        getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory.getId());
        getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory2.getId());
    }

    /**
     * Helper method to create a sub-category with a folder, an incomplete electronic record and a complete
     * electronic record
     * @return
     */
    private RecordCategoryChild createSubCategoryWithRecords()
    {
        STEP("Create a subcategory with a record folder");
        RecordCategoryChild subCategory = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
        RecordCategoryChild recFolder = createFolder(subCategory.getId(), getRandomName("recFolder"));

        STEP("Create 2 records in the record folder. Complete one of them.");
        elRecord = createElectronicRecord(recFolder.getId(), getRandomName("elRecord"));
        nonElRecord = createNonElectronicRecord(recFolder.getId(), getRandomName("nonElRecord"));
        getRestAPIFactory().getRecordsAPI().completeRecord(nonElRecord.getId());
        return subCategory;
    }

}
@@ -0,0 +1,136 @@
/*
 * #%L
 * Alfresco Records Management Module
 * %%
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 * -
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * -
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 * -
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.rest.rm.community.records;

import static java.util.Arrays.asList;

import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentType.CONTENT_TYPE;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createTempFile;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.apache.http.HttpStatus.SC_OK;

import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.v0.ExportAPI;
import org.alfresco.test.AlfrescoTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

/**
 * This class contains tests for testing the Export functionality on RM site
 *
 * @author Shubham Jain
 * @since 7.1.0
 */
public class ExportRecordsTests extends BaseRMRestTest
{
    private RecordCategory rootCategory;

    private RecordCategoryChild recordFolder;

    @Autowired
    private ExportAPI exportAPI;

    @BeforeClass (alwaysRun = true)
    public void exportRecordsTestsBeforeClass()
    {
        STEP("Create root level category");
        rootCategory = createRootCategory(getRandomName("Category"));

        STEP("Create the record folder inside the rootCategory");
        recordFolder = createRecordFolder(rootCategory.getId(), getRandomName("Folder"));
    }

    @DataProvider (name = "CreateRMNodes")
    public Object[][] getRMNodeID()
    {
        return new String[][] {
                { createRecord("Record_4MB", 4).getId() },
                { createRecord("Record_200MB", 200).getId() },
                { recordFolder.getId() }
        };
    }

    /**
     * Given a record with size > 4 MB
     * When I export the record using API
     * Then the request is successful
     */
    @Test (description = "Testing the RM Export functionality for records of size >4MB and Record " +
            "Folder containing records with size >4MB",
            dataProvider = "CreateRMNodes")
    @AlfrescoTest (jira = "APPS-986")
    public void exportRMNodeTest(String nodeID)
    {
        STEP("Export the created record/record folder with size greater than 4 MB and verifying the expected response" +
                " code");
        exportAPI.exportRMNode(getAdminUser().getUsername(), getAdminUser().getPassword(), SC_OK, nodeID);
    }

    /**
     * I would change this to
     * Given a list of records with a size > 4MB
     * When I export the records
     * Then the request is succesfull
     */
    @Test (description = "Testing the RM Export functionality using API for a list of Records at once with " +
            "collective size of more than 4MB")
    public void exportRecordsTest()
    {
        STEP("Export all the created records at once and verifying the expected response code");
        exportAPI.exportRMNodes(getAdminUser().getUsername(), getAdminUser().getPassword(),
                SC_OK, asList(createRecord("Record_2MB", 2).getId(), createRecord("Record_3MB", 3).getId()));
    }

    /**
     * Create a Record with a specific size in RM Site inside already created Record Folder
     *
     * @param recordName      Name of the record to be created
     * @param sizeInMegaBytes Size of the record to be created in MegaBytes
     * @return Created record with defined size
     */
    public Record createRecord(String recordName, int sizeInMegaBytes)
    {
        return getRestAPIFactory().getRecordFolderAPI().createRecord(Record.builder().name(recordName)
                        .nodeType(CONTENT_TYPE).build(), recordFolder.getId(),
                createTempFile("TempFile", sizeInMegaBytes));
    }

    @AfterClass (alwaysRun = true)
    public void exportRecordsTestsAfter()
    {
        STEP("Delete the created rootCategory along with corresponding record folders/records present in it");
        getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory.getId());
    }
}
@@ -41,6 +41,7 @@ import static org.testng.Assert.assertTrue;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
+import java.io.RandomAccessFile;

 import org.alfresco.rest.rm.community.model.record.Record;
 import org.alfresco.rest.rm.community.model.record.RecordProperties;

@@ -66,13 +67,19 @@ public class FilePlanComponentsUtil
         // Intentionally blank
     }

-    /** Name of the image resource file to be used for records body */
+    /**
+     * Name of the image resource file to be used for records body
+     */
     public static final String IMAGE_FILE = "money.JPG";

-    /** Title prefix for record category children */
+    /**
+     * Title prefix for record category children
+     */
     public static final String TITLE_PREFIX = "Title for ";

-    /** Description prefix for record category children */
+    /**
+     * Description prefix for record category children
+     */
     public static final String DESCRIPTION_PREFIX = "This is the description for";

@@ -87,7 +94,7 @@ public class FilePlanComponentsUtil
     }

     /**
      * Creates a record model with the given type and a random name (with "Record " prefix)
      *
      * @param nodeType The node type
      * @return The {@link Record} with for the given node type

@@ -95,9 +102,9 @@ (whitespace-only reindentation of the builder chain)
     private static Record createRecordModel(String nodeType)
     {
         return Record.builder()
                      .name("Record " + getRandomAlphanumeric())
                      .nodeType(nodeType)
                      .build();
     }

     /**

@@ -133,22 +140,22 @@ (whitespace-only reindentation)
     /**
      * Creates an unfiled records container child record model with the given name and type
      *
      * @param name     The name of the unfiled records container child
      * @param nodeType The type of the record category child
      * @return The {@link UnfiledContainerChild} with the given details
      */
     public static UnfiledContainerChild createUnfiledContainerChildRecordModel(String name, String nodeType)
     {
         return UnfiledContainerChild.builder()
                                     .name(name)
                                     .nodeType(nodeType)
                                     .build();
     }

     /**
      * Creates a nonElectronic container child record model with all available properties for the non electronic records
      *
      * @param name     The name of the unfiled records container child
      * @param nodeType The type of the record category child
      * @return The {@link UnfiledContainerChild} with the given details
      */

@@ -156,19 +163,19 @@ (whitespace-only reindentation)
             String shelf, String storageLocation, Integer numberOfCopies, Integer physicalSize)
     {
         return UnfiledContainerChild.builder()
                                     .name(name)
                                     .nodeType(NON_ELECTRONIC_RECORD_TYPE)
                                     .properties(UnfiledContainerChildProperties.builder()
                                             .title(title)
                                             .description(description)
                                             .box(box)
                                             .file(file)
                                             .shelf(shelf)
                                             .storageLocation(storageLocation)
                                             .numberOfCopies(numberOfCopies)
                                             .physicalSize(physicalSize)
                                             .build())
                                     .build();
     }

     /**

@@ -190,110 +197,110 @@ (whitespace-only reindentation of the remaining model factory methods)
             String shelf, String storageLocation, Integer numberOfCopies, Integer physicalSize)
     {
         return Record.builder()
                      .name(name)
                      .nodeType(NON_ELECTRONIC_RECORD_TYPE)
                      .properties(RecordProperties.builder()
                              .title(title)
                              .description(description)
                              .box(box)
                              .file(file)
                              .shelf(shelf)
                              .storageLocation(storageLocation)
                              .numberOfCopies(numberOfCopies)
                              .physicalSize(physicalSize)
                              .build())
                      .build();
     }

     /**
      * Creates a record model with the given name, description and title
      *
      * @param name        The name of the record
      * @param description The description of the record
      * @param title       The title of the record
      * @return The {@link Record} with the given details
      */
     public static Record createRecordModel(String name, String description, String title)
     {
         return Record.builder()
                      .name(name)
                      .properties(RecordProperties.builder()
                              .description(description)
                              .title(title)
                              .build())
                      .build();
     }

     /**
      * Creates a record category child model with the given name and type
      *
      * @param name     The name of the record category child
      * @param nodeType The type of the record category child
      * @return The {@link RecordCategoryChild} with the given details
      */
     public static RecordCategoryChild createRecordCategoryChildModel(String name, String nodeType)
     {
         return RecordCategoryChild.builder()
                                   .name(name)
                                   .nodeType(nodeType)
                                   .properties(RecordCategoryChildProperties.builder()
                                           .title(TITLE_PREFIX + name)
                                           .build())
                                   .build();
     }

     /**
      * Creates a record category model with the given name and title
      *
      * @param name  The name of the record category
      * @param title The title of the record category
      * @return The {@link RecordCategory} with the given details
      */
     public static RecordCategory createRecordCategoryModel(String name, String title)
     {
         return RecordCategory.builder()
                              .name(name)
                              .nodeType(RECORD_CATEGORY_TYPE)
                              .properties(RecordCategoryProperties.builder()
                                      .title(title)
                                      .build())
                              .build();
     }

     /**
      * Creates a record folder model with the given name and title
      *
      * @param name  The name of the record folder
      * @param title The title of the record folder
      * @return The {@link RecordFolder} with the given details
      */
     public static RecordFolder createRecordFolderModel(String name, String title)
     {
         return RecordFolder.builder()
                            .name(name)
                            .nodeType(RECORD_FOLDER_TYPE)
                            .properties(RecordFolderProperties.builder()
                                    .title(title)
                                    .build())
                            .build();
     }

     /**
      * Creates an unfiled records container child model with the given name and type
      *
      * @param name     The name of the unfiled records container child
      * @param nodeType The type of the record category child
      * @return The {@link UnfiledContainerChild} with the given details
      */
     public static UnfiledContainerChild createUnfiledContainerChildModel(String name, String nodeType)
     {
         return UnfiledContainerChild.builder()
                                     .name(name)
                                     .nodeType(nodeType)
                                     .properties(UnfiledContainerChildProperties.builder()
                                             .title(TITLE_PREFIX + name)
                                             .build())
                                     .build();
     }

     /**

@@ -324,6 +331,32 @@ public class FilePlanComponentsUtil
         }
     }

+    /**
+     * Method to create a temporary file with specific size
+     *
+     * @param name            file name
+     * @param sizeInMegaBytes size
+     * @return temporary file
+     */
+    public static File createTempFile(final String name, long sizeInMegaBytes)
+    {
+        try
+        {
+            // Create file
+            final File file = File.createTempFile(name, ".txt");
+
+            RandomAccessFile raf = new RandomAccessFile(file, "rw");
+            raf.setLength(sizeInMegaBytes * 1024 * 1024);
+            raf.close();
+
+            return file;
+        }
+        catch (Exception exception)
+        {
+            throw new RuntimeException("Unable to create test file.", exception);
+        }
+    }
+
     /**
      * Helper method to verify all properties of a nonElectronic record
      *
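As a usage note, the new createTempFile helper above is what lets the export tests build records of a controlled size. A minimal, hedged sketch of how it might be called in isolation (the file name and size are illustrative, not taken from the diff):

    // Allocate a sparse 4 MB text file to use as a record payload in a test.
    File payload = FilePlanComponentsUtil.createTempFile("TempFile", 4);
    // RandomAccessFile.setLength pre-sizes the file, so its reported length is 4 MB.
    assert payload.length() == 4L * 1024 * 1024;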
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-governance-services-community-parent</artifactId>
-       <version>14.1</version>
+       <version>14.3</version>
    </parent>

    <modules>
@@ -1,4 +1,4 @@
-TRANSFORMERS_TAG=2.5.2
+TRANSFORMERS_TAG=2.5.3
 SOLR6_TAG=2.0.2-RC1
 POSTGRES_TAG=13.3
 ACTIVEMQ_TAG=5.16.1
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=You can't create items i
 rm.action.create.transfer.child-error-message=You can't create items in Transfer Folders.
 rm.action.create.record.folder.child-error-message=You can only create records in record folders and this was a {0}.
 rm.action.transfer-non-editable=You can't edit transfer folder or container metadata.

+rm.action.node.frozen.error-message=Unable to perform action {0} because the node is frozen or has frozen children.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Nelze vytv\u00e1\u0159et
 rm.action.create.transfer.child-error-message=Nelze vytv\u00e1\u0159et polo\u017eky ve slo\u017ece p\u0159enosu.
 rm.action.create.record.folder.child-error-message=Z\u00e1znamy je mo\u017en\u00e9 vytv\u00e1\u0159et pouze ve slo\u017ek\u00e1ch z\u00e1znam\u016f a toto bylo {0}.
 rm.action.transfer-non-editable=Nelze upravovat metadata kontejneru nebo slo\u017eky pro p\u0159enos.

+rm.action.node.frozen.error-message=Akci {0} nelze prov\u00e9st, proto\u017ee uzel je zablokovan\u00fd nebo m\u00e1 zablokovan\u00e9 pod\u0159\u00edzen\u00e9 prvky.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Du kan ikke oprette elem
 rm.action.create.transfer.child-error-message=Du kan ikke oprette elementer i Overf\u00f8r-mapper.
 rm.action.create.record.folder.child-error-message=Du kan kun oprette poster i postmapper, og dette var en {0}.
 rm.action.transfer-non-editable=Du kan ikke redigere overf\u00f8rselsmappe- eller container-metadata.

+rm.action.node.frozen.error-message=Handlingen kan ikke udf\u00f8res {0}, fordi noden er l\u00e5st eller har l\u00e5ste underordnede noder.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Sie k\u00f6nnen keine El
 rm.action.create.transfer.child-error-message=Sie k\u00f6nnen keine Elemente in \u00dcbertragungsordnern erstellen.
 rm.action.create.record.folder.child-error-message=In Record-Ordnern k\u00f6nnen Sie nur Records erstellen. Das war aber ein {0}.
 rm.action.transfer-non-editable=Sie k\u00f6nnen Metadaten von \u00dcbertragungsordnern oder -containern nicht bearbeiten.

+rm.action.node.frozen.error-message=Die Aktion ''{0}'' kann nicht ausgef\u00fchrt werden, da der Knoten oder untergeordnete Elemente von ihm festgefahren ist bzw. sind.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=No se pueden crear eleme
 rm.action.create.transfer.child-error-message=No se pueden crear elementos en las carpetas de transferencia.
 rm.action.create.record.folder.child-error-message=Solo puede crear documentos de archivo en carpetas de documentos de archivo. {0} no se puede crear aqu\u00ed.
 rm.action.transfer-non-editable=No se puede editar una carpeta de transferencia ni los metadatos de un contenedor.

+rm.action.node.frozen.error-message=No se puede realizar la acci\u00f3n {0} porque el nodo est\u00e1 congelado o tiene elementos secundarios congelados.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Siirtos\u00e4ili\u00f6\u
 rm.action.create.transfer.child-error-message=Siirtokansioihin ei voi luoda kohteita.
 rm.action.create.record.folder.child-error-message=Tietuekansioihin voi luoda ainoastaan tietueita, mutta t\u00e4m\u00e4 oli {0}.
 rm.action.transfer-non-editable=Siirtokansion tai -s\u00e4ili\u00f6n metatietoja ei voi muokata.

+rm.action.node.frozen.error-message=Toimintoa {0} ei voitu suorittaa, koska solmu on j\u00e4\u00e4dytetty tai sill\u00e4 on j\u00e4\u00e4dytettyj\u00e4 alatasoja.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Impossible de cr\u00e9er
 rm.action.create.transfer.child-error-message=Impossible de cr\u00e9er des \u00e9l\u00e9ments dans les dossiers de transfert.
 rm.action.create.record.folder.child-error-message=Vous ne pouvez cr\u00e9er des documents d''archives que dans les dossiers d''archives, dans le cas pr\u00e9sent {0}.
 rm.action.transfer-non-editable=Impossible de modifier les m\u00e9tadonn\u00e9es de dossier de transfert ou de contenant.

+rm.action.node.frozen.error-message=Impossible d''effectuer l''action {0} car le n\u0153ud ou ses enfants sont gel\u00e9s.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Impossibile creare eleme
 rm.action.create.transfer.child-error-message=Impossibile creare elementi nelle cartelle Trasferimento.
 rm.action.create.record.folder.child-error-message=I record possono essere creati solo nelle cartelle dei record e questa era {0}.
 rm.action.transfer-non-editable=Impossibile modificare la cartella di trasferimento o i metadati dei contenitori.

+rm.action.node.frozen.error-message=Impossibile eseguire l''azione {0} poich\u00e9 il nodo \u00e8 congelato o presenta nodi figlio congelati.

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=\u8ee2\u9001\u30b3\u30f3
 rm.action.create.transfer.child-error-message=\u8ee2\u9001\u30d5\u30a9\u30eb\u30c0\u5185\u3067\u306f\u30a2\u30a4\u30c6\u30e0\u3092\u4f5c\u6210\u3067\u304d\u307e\u305b\u3093\u3002
 rm.action.create.record.folder.child-error-message=\u30ec\u30b3\u30fc\u30c9\u3092\u4f5c\u6210\u3067\u304d\u308b\u306e\u306f\u30ec\u30b3\u30fc\u30c9\u30d5\u30a9\u30eb\u30c0\u5185\u306e\u307f\u3067\u3001\u3053\u308c\u306f {0} \u3067\u3059\u3002
 rm.action.transfer-non-editable=\u8ee2\u9001\u30d5\u30a9\u30eb\u30c0\u307e\u305f\u306f\u30b3\u30f3\u30c6\u30ca\u30e1\u30bf\u30c7\u30fc\u30bf\u306f\u7de8\u96c6\u3067\u304d\u307e\u305b\u3093\u3002

+rm.action.node.frozen.error-message=\u30ce\u30fc\u30c9\u304c\u30d5\u30ea\u30fc\u30ba\u3057\u3066\u3044\u308b\u304b\u3001\u307e\u305f\u306f\u5b50\u304c\u51cd\u7d50\u3057\u3066\u3044\u308b\u305f\u3081\u3001\u30a2\u30af\u30b7\u30e7\u30f3 {0}\u3092\u5b9f\u884c\u3067\u304d\u307e\u305b\u3093

@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Elementer kan ikke oppre
 rm.action.create.transfer.child-error-message=Elementer kan ikke opprettes i overf\u00f8ringsmapper.
 rm.action.create.record.folder.child-error-message=Oppf\u00f8ringer kan bare opprettes i oppf\u00f8ringsmapper, og dette er en {0}.
 rm.action.transfer-non-editable=Overf\u00f8ringsmapper eller beholdermetadata kan ikke redigeres.
|
||||
|
||||
rm.action.node.frozen.error-message=Kan ikke utf\u00f8re handlingen {0} fordi noden er frossen eller har frosne underordnede elementer.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=U kunt geen onderdelen m
|
||||
rm.action.create.transfer.child-error-message=U kunt geen onderdelen maken in overdrachtsmappen.
|
||||
rm.action.create.record.folder.child-error-message=U kunt alleen archiefstukken maken in archiefmappen en dit was een {0}.
|
||||
rm.action.transfer-non-editable=U kunt metagegevens in een overdrachtsmap of -container niet bewerken.
|
||||
|
||||
rm.action.node.frozen.error-message=Kan de actie {0} niet uitvoeren omdat de node geblokkeerd is of geblokkeerde onderliggende elementen heeft.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Nie mo\u017cna utworzy\u
|
||||
rm.action.create.transfer.child-error-message=Nie mo\u017cna utworzy\u0107 pozycji w folderach przesy\u0142ania.
|
||||
rm.action.create.record.folder.child-error-message=W folderach rekord\u00f3w mo\u017cna tworzy\u0107 tylko rekordy, a to by\u0142o {0}.
|
||||
rm.action.transfer-non-editable=Nie mo\u017cna edytowa\u0107 folderu przesy\u0142ania ani metadanych kontenera.
|
||||
|
||||
rm.action.node.frozen.error-message=Nie mo\u017cna wykona\u0107 czynno\u015bci {0}, poniewa\u017c w\u0119ze\u0142 jest zablokowany lub ma zablokowane elementy podrz\u0119dne.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=N\u00e3o \u00e9 poss\u00
|
||||
rm.action.create.transfer.child-error-message=N\u00e3o \u00e9 poss\u00edvel criar itens nas pastas de Transfer\u00eancia.
|
||||
rm.action.create.record.folder.child-error-message=\u00c9 poss\u00edvel apenas criar documentos arquiv\u00edsticos em pastas de documentos arquiv\u00edsticos; esta foi {0}.
|
||||
rm.action.transfer-non-editable=N\u00e3o \u00e9 poss\u00edvel editar os metadados das pastas de transfer\u00eancia ou cont\u00eainer.
|
||||
|
||||
rm.action.node.frozen.error-message=N\u00e3o \u00e9 poss\u00edvel executar a a\u00e7\u00e3o {0} porque o n\u00f3 est\u00e1 congelado ou tem filhos congelados.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=\u041d\u0435\u0432\u043e
|
||||
rm.action.create.transfer.child-error-message=\u041d\u0435\u0432\u043e\u0437\u043c\u043e\u0436\u043d\u043e \u0441\u043e\u0437\u0434\u0430\u0442\u044c \u044d\u043b\u0435\u043c\u0435\u043d\u0442\u044b \u0432 \u043f\u0430\u043f\u043a\u0430\u0445 \u043f\u0435\u0440\u0435\u0434\u0430\u0447\u0438.
|
||||
rm.action.create.record.folder.child-error-message=\u0417\u0430\u043f\u0438\u0441\u0438 \u043c\u043e\u0436\u043d\u043e \u0441\u043e\u0437\u0434\u0430\u0442\u044c \u0442\u043e\u043b\u044c\u043a\u043e \u0432 \u043f\u0430\u043f\u043a\u0430\u0445 \u0437\u0430\u043f\u0438\u0441\u0435\u0439, \u0430 \u044d\u0442\u043e {0}.
|
||||
rm.action.transfer-non-editable=\u041d\u0435\u0432\u043e\u0437\u043c\u043e\u0436\u043d\u043e \u0440\u0435\u0434\u0430\u043a\u0442\u0438\u0440\u043e\u0432\u0430\u0442\u044c \u043c\u0435\u0442\u0430\u0434\u0430\u043d\u043d\u044b\u0435 \u043a\u043e\u043d\u0442\u0435\u0439\u043d\u0435\u0440\u0430 \u0438\u043b\u0438 \u043f\u0430\u043f\u043a\u0438 \u043f\u0435\u0440\u0435\u0434\u0430\u0447\u0438.
|
||||
|
||||
rm.action.node.frozen.error-message=\u041d\u0435 \u0443\u0434\u0430\u0435\u0442\u0441\u044f \u0432\u044b\u043f\u043e\u043b\u043d\u0438\u0442\u044c \u0434\u0435\u0439\u0441\u0442\u0432\u0438\u0435 {0}, \u043f\u043e\u0442\u043e\u043c\u0443 \u0447\u0442\u043e \u0443\u0437\u0435\u043b \u0437\u0430\u043a\u0440\u0435\u043f\u043b\u0435\u043d \u0438\u043b\u0438 \u0438\u043c\u0435\u0435\u0442 \u0437\u0430\u043a\u0440\u0435\u043f\u043b\u0435\u043d\u043d\u044b\u0435 \u0434\u043e\u0447\u0435\u0440\u043d\u0438\u0435 \u044d\u043b\u0435\u043c\u0435\u043d\u0442\u044b.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=Du kan inte skapa objekt
|
||||
rm.action.create.transfer.child-error-message=Du kan inte skapa objekt i \u00d6verf\u00f6ringsmappar.
|
||||
rm.action.create.record.folder.child-error-message=Du kan endast skapa handlingar i handlingsmappar och detta var en {0}.
|
||||
rm.action.transfer-non-editable=Du kan inte redigera \u00f6verf\u00f6ringsmappen eller beh\u00e5llarens metadata.
|
||||
|
||||
rm.action.node.frozen.error-message=Det gick inte att utf\u00f6ra \u00e5tg\u00e4rd {0} eftersom noden \u00e4r frusen eller har frysta underordnade.
|
||||
|
@@ -45,4 +45,4 @@ rm.action.create.transfer.container.child-error-message=\u60a8\u65e0\u6cd5\u5728
|
||||
rm.action.create.transfer.child-error-message=\u60a8\u65e0\u6cd5\u5728 Transfer \u6587\u4ef6\u5939\u4e2d\u521b\u5efa\u9879\u76ee\u3002
|
||||
rm.action.create.record.folder.child-error-message=\u60a8\u53ea\u80fd\u5728\u8bb0\u5f55\u6587\u4ef6\u5939\u4e2d\u521b\u5efa\u8bb0\u5f55\uff0c\u4e14\u8fd9\u662f\u4e00\u4e2a {0}\u3002
|
||||
rm.action.transfer-non-editable=\u60a8\u65e0\u6cd5\u7f16\u8f91\u4f20\u8f93\u6587\u4ef6\u5939\u6216\u5bb9\u5668\u5143\u6570\u636e\u3002
|
||||
|
||||
rm.action.node.frozen.error-message=\u65e0\u6cd5\u6267\u884c\u64cd\u4f5c{0}\uff0c\u56e0\u4e3a\u8282\u70b9\u5df2\u51bb\u7ed3\u6216\u51bb\u7ed3\u7684\u5b50\u8282\u70b9\u3002
|
||||
|
@@ -80,9 +80,7 @@
<property name="searchService" ref="searchService" />
<property name="personService" ref="personService" />
<property name="recordsManagementActionService" ref="recordsManagementActionService" />
<property name="recordFolderService" ref="RecordFolderService" />
<property name="recordService" ref="RecordService" />
<property name="freezeService" ref="FreezeService" />
<property name="freezeService" ref="freezeService"/>
<property name="batchSize" value="${rm.dispositionlifecycletrigger.batchsize}"/>
</bean>
@@ -102,9 +102,11 @@
<bean id="rma.unfiledRecordFolder" class="org.alfresco.module.org_alfresco_module_rm.model.rma.type.UnfiledRecordFolderType" parent="rm.baseBehaviour" />

<bean id="rma.recordCategory" class="org.alfresco.module.org_alfresco_module_rm.model.rma.type.RecordCategoryType" parent="rm.baseBehaviour">
<property name="recordService" ref="RecordService" />
<property name="vitalRecordService" ref="VitalRecordService" />
<property name="filePlanPermissionService" ref="FilePlanPermissionService" />
<property name="recordFolderService" ref="RecordFolderService" />
<property name="dispositionService" ref="DispositionService" />
</bean>

<bean id="rma.recordFolder" class="org.alfresco.module.org_alfresco_module_rm.model.rma.type.RecordFolderType" parent="rm.baseBehaviour">
@@ -695,6 +695,13 @@
init-method="init" depends-on="org_alfresco_module_rm_resourceBundles">
<property name="policyComponent" ref="policyComponent"/>
<property name="nodeService" ref="nodeService"/>
<!-- list of disposition actions to automatically execute when eligible -->
<property name="retentionActions">
<list>
<value>retain</value>
</list>
</property>
<property name="freezeService" ref="freezeService"/>
</bean>

<bean id="RecordsManagementActionService" class="org.springframework.aop.framework.ProxyFactoryBean">
@@ -899,6 +906,8 @@
<bean id="freezeService" class="org.alfresco.module.org_alfresco_module_rm.freeze.FreezeServiceImpl" parent="baseService">
<property name="filePlanService" ref="FilePlanService" />
<property name="holdService" ref="HoldService" />
<property name="recordFolderService" ref="RecordFolderService"/>
<property name="recordService" ref="RecordService"/>
</bean>

<bean id="FreezeService" class="org.springframework.aop.framework.ProxyFactoryBean">
@@ -944,6 +953,7 @@
org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService.hasFrozenChildren=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService.getFreezeDate=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService.getFreezeInitiator=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService.isFrozenOrHasFrozenChildren=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService.*=RM_DENY
]]>
</value>
@@ -5,7 +5,7 @@ version: "3"
|
||||
services:
|
||||
alfresco:
|
||||
# acs repo community image with ags repo community amp applied
|
||||
image: alfresco/alfresco-governance-repository-community:latest
|
||||
image: alfresco/alfresco-governance-repository-community-base:latest
|
||||
environment:
|
||||
CATALINA_OPTS : "
|
||||
-agentlib:jdwp=transport=dt_socket,address=8000,server=y,suspend=n
|
||||
|
@@ -8,13 +8,13 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>14.1</version>
<version>14.3</version>
</parent>

<properties>
<app.amp.client.war.folder>${project.build.directory}/${project.build.finalName}-war</app.amp.client.war.folder>

<image.name>alfresco/alfresco-governance-repository-community</image.name>
<image.name>alfresco/alfresco-governance-repository-community-base</image.name>
</properties>

<dependencies>
@@ -141,6 +141,11 @@
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>

<build>
@@ -35,17 +35,17 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

import lombok.extern.slf4j.Slf4j;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.BeforeRMActionExecution;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.OnRMActionExecution;
import org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService;
import org.alfresco.module.org_alfresco_module_rm.util.PoliciesUtil;
import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;

/**
@@ -53,14 +53,13 @@ import org.springframework.extensions.surf.util.I18NUtil;
*
* @author Roy Wetherall
*/
@Slf4j
public class RecordsManagementActionServiceImpl implements RecordsManagementActionService
{
/** I18N */
private static final String MSG_NOT_DEFINED = "rm.action.not-defined";
private static final String MSG_NO_IMPLICIT_NODEREF = "rm.action.no-implicit-noderef";

/** Logger */
private static Log logger = LogFactory.getLog(RecordsManagementActionServiceImpl.class);
private static final String MSG_NODE_FROZEN = "rm.action.node.frozen.error-message";

/** Registered records management actions */
private Map<String, RecordsManagementAction> rmActions = new HashMap<>(13);
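The hunk above replaces the hand-rolled commons-logging logger with Lombok's @Slf4j (hence the new lombok.extern.slf4j.Slf4j import and the lombok dependency added to the pom earlier in this change). As a rough sketch of what the annotation provides, not this project's actual generated source, @Slf4j is roughly equivalent to declaring:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RecordsManagementActionServiceImpl implements RecordsManagementActionService
{
    // What Lombok's @Slf4j generates at compile time; the field name "log" is fixed by Lombok.
    private static final Logger log =
            LoggerFactory.getLogger(RecordsManagementActionServiceImpl.class);
}

This is why later hunks can call log.debug(...) and log.warn(...) without declaring an explicit logger field.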
@@ -78,6 +77,16 @@ public class RecordsManagementActionServiceImpl implements RecordsManagementActi
private ClassPolicyDelegate<BeforeRMActionExecution> beforeRMActionExecutionDelegate;
private ClassPolicyDelegate<OnRMActionExecution> onRMActionExecutionDelegate;

/**
* Freeze Service
*/
private FreezeService freezeService;

/**
* list of retention actions to automatically execute
*/
private List<String> retentionActions;

/**
* @return Policy component
*/
@@ -94,6 +103,19 @@ public class RecordsManagementActionServiceImpl implements RecordsManagementActi
return this.nodeService;
}

/**
* @param freezeService freeze service
*/
public void setFreezeService(FreezeService freezeService)
{
this.freezeService = freezeService;
}

public void setRetentionActions(List<String> retentionActions)
{
this.retentionActions = retentionActions;
}

/**
* Set the policy component
*
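The two setters above receive the values injected by the Spring configuration earlier in this change (the lower-case freezeService bean and the retentionActions list containing retain). A minimal sketch of the equivalent programmatic wiring, for example in a test fixture; everything here except the "retain" entry and the setter names shown in this diff is an assumption:

RecordsManagementActionServiceImpl actionService = new RecordsManagementActionServiceImpl();
actionService.setPolicyComponent(policyComponent);      // collaborators already wired by the bean definition
actionService.setNodeService(nodeService);
actionService.setFreezeService(freezeService);          // newly injected FreezeService
actionService.setRetentionActions(List.of("retain"));   // mirrors the <value>retain</value> list entry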
@@ -267,21 +289,23 @@ public class RecordsManagementActionServiceImpl implements RecordsManagementActi
*/
public RecordsManagementActionResult executeRecordsManagementAction(NodeRef nodeRef, String name, Map<String, Serializable> parameters)
{
if (logger.isDebugEnabled())
{
logger.debug("Executing record management action on " + nodeRef);
logger.debug(" actionName = " + name);
logger.debug(" parameters = " + parameters);
}
log.debug("Executing record management action on " + nodeRef);
log.debug(" actionName = " + name);
log.debug(" parameters = " + parameters);

RecordsManagementAction rmAction = this.rmActions.get(name);
if (rmAction == null)
{
String msg = I18NUtil.getMessage(MSG_NOT_DEFINED, name);
if (logger.isWarnEnabled())
{
logger.warn(msg);
}
log.warn(msg);
throw new AlfrescoRuntimeException(msg);
}

if (retentionActions.contains(name.toLowerCase()) && freezeService.isFrozenOrHasFrozenChildren(nodeRef))
{
String msg = I18NUtil.getMessage(MSG_NODE_FROZEN, name);
log.debug(msg);

throw new AlfrescoRuntimeException(msg);
}
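This hunk is the core behavioural change: before a configured retention action (for example retain) is executed, the service now refuses to run it when the target node, or any of its children, is frozen by a hold, raising the new rm.action.node.frozen.error-message. A minimal caller-side sketch of the same guard, built only from methods visible in this diff (the wrapper class and its names are assumptions, not part of the change):

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.springframework.extensions.surf.util.I18NUtil;

public class FrozenNodeGuard
{
    private final FreezeService freezeService;

    public FrozenNodeGuard(FreezeService freezeService)
    {
        this.freezeService = freezeService;
    }

    // Throws if the record or record folder (or one of its children) is held/frozen.
    public void checkNotFrozen(NodeRef nodeRef, String actionName)
    {
        if (freezeService.isFrozenOrHasFrozenChildren(nodeRef))
        {
            throw new AlfrescoRuntimeException(
                    I18NUtil.getMessage("rm.action.node.frozen.error-message", actionName));
        }
    }
}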
|
||||
@@ -307,10 +331,7 @@ public class RecordsManagementActionServiceImpl implements RecordsManagementActi
|
||||
if (implicitTargetNode == null)
|
||||
{
|
||||
String msg = I18NUtil.getMessage(MSG_NO_IMPLICIT_NODEREF, name);
|
||||
if (logger.isWarnEnabled())
|
||||
{
|
||||
logger.warn(msg);
|
||||
}
|
||||
log.warn(msg);
|
||||
throw new AlfrescoRuntimeException(msg);
|
||||
}
|
||||
else
|
||||
|
@@ -45,7 +45,6 @@ import org.alfresco.module.org_alfresco_module_rm.disposition.property.Dispositi
|
||||
import org.alfresco.module.org_alfresco_module_rm.event.RecordsManagementEvent;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanComponentKind;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
@@ -59,7 +58,6 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
|
@@ -150,4 +150,12 @@ public interface FreezeService
|
||||
*/
|
||||
@Deprecated
|
||||
Set<NodeRef> getHolds(NodeRef filePlan);
|
||||
|
||||
/**
|
||||
* Check given node or its children are frozen
|
||||
* The node should be record or record folder for retention schedule
|
||||
*
|
||||
* @param nodeRef
|
||||
*/
|
||||
boolean isFrozenOrHasFrozenChildren(NodeRef nodeRef);
|
||||
}
|
||||
|
@@ -43,6 +43,8 @@ import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.hold.HoldService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
@@ -75,6 +77,32 @@ public class FreezeServiceImpl extends ServiceBaseImpl
|
||||
/** Hold service */
|
||||
private HoldService holdService;
|
||||
|
||||
/**
|
||||
* Record Folder Service
|
||||
*/
|
||||
private RecordFolderService recordFolderService;
|
||||
|
||||
/**
|
||||
* Record Service
|
||||
*/
|
||||
private RecordService recordService;
|
||||
|
||||
/**
|
||||
* @param recordFolderService record folder service
|
||||
*/
|
||||
public void setRecordFolderService(RecordFolderService recordFolderService)
|
||||
{
|
||||
this.recordFolderService = recordFolderService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordService record service
|
||||
*/
|
||||
public void setRecordService(RecordService recordService)
|
||||
{
|
||||
this.recordService = recordService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return File plan service
|
||||
*/
|
||||
@@ -392,4 +420,24 @@ public class FreezeServiceImpl extends ServiceBaseImpl
|
||||
// create hold
|
||||
return getHoldService().createHold(filePlan, holdName, reason, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to determine if a node is frozen or has frozen children
|
||||
*
|
||||
* @param nodeRef Node to be checked
|
||||
* @return <code>true</code> if the node is frozen or has frozen children, <code>false</code> otherwise
|
||||
*/
|
||||
@Override
|
||||
public boolean isFrozenOrHasFrozenChildren(NodeRef nodeRef)
|
||||
{
|
||||
if (recordFolderService.isRecordFolder(nodeRef))
|
||||
{
|
||||
return isFrozen(nodeRef) || hasFrozenChildren(nodeRef);
|
||||
}
|
||||
else if (recordService.isRecord(nodeRef))
|
||||
{
|
||||
return isFrozen(nodeRef);
|
||||
}
|
||||
return Boolean.FALSE;
|
||||
}
|
||||
}
|
||||
|
@@ -30,15 +30,14 @@ package org.alfresco.module.org_alfresco_module_rm.job;
|
||||
import static org.alfresco.module.org_alfresco_module_rm.action.RMDispositionActionExecuterAbstractBase.PARAM_NO_ERROR_CHECK;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.module.org_alfresco_module_rm.action.RecordsManagementActionService;
|
||||
|
||||
import org.alfresco.module.org_alfresco_module_rm.freeze.FreezeService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -48,8 +47,8 @@ import org.alfresco.service.cmr.search.ResultSet;
|
||||
import org.alfresco.service.cmr.search.SearchParameters;
|
||||
import org.alfresco.service.cmr.search.SearchService;
|
||||
import org.alfresco.service.cmr.security.PersonService;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.extensions.surf.util.I18NUtil;
|
||||
|
||||
|
||||
/**
|
||||
* The Disposition Lifecycle Job Finds all disposition action nodes which are for disposition actions specified Where
|
||||
@@ -58,14 +57,14 @@ import org.apache.commons.logging.LogFactory;
|
||||
* @author mrogers
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
@Slf4j
|
||||
public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecuter
|
||||
{
|
||||
/** logger */
|
||||
private static Log logger = LogFactory.getLog(DispositionLifecycleJobExecuter.class);
|
||||
|
||||
/** batching properties */
|
||||
private int batchSize;
|
||||
public static final int DEFAULT_BATCH_SIZE = 500;
|
||||
private static final String MSG_NODE_FROZEN = "rm.action.node.frozen.error-message";
|
||||
|
||||
/** list of disposition actions to automatically execute */
|
||||
private List<String> dispositionActions;
|
||||
@@ -88,11 +87,13 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
/** freeze service */
|
||||
private FreezeService freezeService;
|
||||
|
||||
/** record service */
|
||||
private RecordService recordService;
|
||||
|
||||
/** record folder service */
|
||||
private RecordFolderService recordFolderService;
|
||||
/**
|
||||
* @param freezeService freeze service
|
||||
*/
|
||||
public void setFreezeService(FreezeService freezeService)
|
||||
{
|
||||
this.freezeService = freezeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* List of disposition actions to automatically execute when eligible.
|
||||
@@ -133,30 +134,6 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
this.searchService = searchService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param freezeService freeze service
|
||||
*/
|
||||
public void setFreezeService(FreezeService freezeService)
|
||||
{
|
||||
this.freezeService = freezeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordService record service
|
||||
*/
|
||||
public void setRecordService(RecordService recordService)
|
||||
{
|
||||
this.recordService = recordService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordFolderService record folder service
|
||||
*/
|
||||
public void setRecordFolderService(RecordFolderService recordFolderService)
|
||||
{
|
||||
this.recordFolderService = recordFolderService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the search query string.
|
||||
*
|
||||
@@ -207,11 +184,11 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
{
|
||||
try
|
||||
{
|
||||
logger.debug("Job Starting");
|
||||
log.debug("Job Starting");
|
||||
|
||||
if (dispositionActions == null || dispositionActions.isEmpty())
|
||||
{
|
||||
logger.debug("Job Finished as disposition action is empty");
|
||||
log.debug("Job Finished as disposition action is empty");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -220,16 +197,11 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
|
||||
if (batchSize < 1)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Invalid value for batch size: " + batchSize + " default value used instead.");
|
||||
}
|
||||
log.debug("Invalid value for batch size: " + batchSize + " default value used instead.");
|
||||
batchSize = DEFAULT_BATCH_SIZE;
|
||||
}
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("Using batch size of " + batchSize);
|
||||
}
|
||||
|
||||
log.trace("Using batch size of " + batchSize);
|
||||
|
||||
while (hasMore)
|
||||
{
|
||||
@@ -247,10 +219,7 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
skipCount += resultNodes.size(); // increase by page size
|
||||
results.close();
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Processing " + resultNodes.size() + " nodes");
|
||||
}
|
||||
log.debug("Processing " + resultNodes.size() + " nodes");
|
||||
|
||||
// process search results
|
||||
if (!resultNodes.isEmpty())
|
||||
@@ -258,14 +227,11 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
executeAction(resultNodes);
|
||||
}
|
||||
}
|
||||
logger.debug("Job Finished");
|
||||
log.debug("Job Finished");
|
||||
}
|
||||
catch (AlfrescoRuntimeException exception)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(exception);
|
||||
}
|
||||
log.debug(exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,12 +265,9 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
}
|
||||
Map<String, Serializable> props = Map.of(PARAM_NO_ERROR_CHECK, false);
|
||||
|
||||
if (isFrozenOrHasFrozenChildren(parent.getParentRef()))
|
||||
if (freezeService.isFrozenOrHasFrozenChildren(parent.getParentRef()))
|
||||
{
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("unable to perform action " + dispAction +
|
||||
" because node is frozen or has frozen children");
|
||||
}
|
||||
log.debug(I18NUtil.getMessage(MSG_NODE_FROZEN, dispAction));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -314,17 +277,13 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
recordsManagementActionService
|
||||
.executeRecordsManagementAction(parent.getParentRef(), dispAction, props);
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Processed action: " + dispAction + "on" + parent);
|
||||
}
|
||||
log.debug("Processed action: " + dispAction + "on" + parent);
|
||||
|
||||
}
|
||||
catch (AlfrescoRuntimeException exception)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(exception);
|
||||
}
|
||||
log.debug(exception.getMessage());
|
||||
|
||||
}
|
||||
}
|
||||
return Boolean.TRUE;
|
||||
@@ -332,25 +291,6 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
|
||||
retryingTransactionHelper.doInTransaction(processTranCB, false, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to determine if a node is frozen or has frozen children
|
||||
*
|
||||
* @param nodeRef Node to be checked
|
||||
* @return <code>true</code> if the node is frozen or has frozen children, <code>false</code> otherwise
|
||||
*/
|
||||
private boolean isFrozenOrHasFrozenChildren(NodeRef nodeRef)
|
||||
{
|
||||
if (recordFolderService.isRecordFolder(nodeRef))
|
||||
{
|
||||
return freezeService.isFrozen(nodeRef) || freezeService.hasFrozenChildren(nodeRef);
|
||||
}
|
||||
if (recordService.isRecord(nodeRef))
|
||||
{
|
||||
return freezeService.isFrozen(nodeRef);
|
||||
}
|
||||
throw new AlfrescoRuntimeException("The nodeRef '" + nodeRef + "' is neither a record nor a record folder.");
|
||||
}
|
||||
|
||||
public PersonService getPersonService()
|
||||
{
|
||||
return personService;
|
||||
|
@@ -30,6 +30,10 @@ package org.alfresco.module.org_alfresco_module_rm.model.behaviour;
|
||||
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
|
||||
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.BaseBehaviourBean;
|
||||
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
@@ -52,7 +56,13 @@ public abstract class AbstractDisposableItem extends BaseBehaviourBean
|
||||
|
||||
/** disposition service */
|
||||
protected DispositionService dispositionService;
|
||||
|
||||
|
||||
/** record service */
|
||||
protected RecordService recordService;
|
||||
|
||||
/** record folder service */
|
||||
protected RecordFolderService recordFolderService;
|
||||
|
||||
/**
|
||||
* @param dispositionService disposition service
|
||||
*/
|
||||
@@ -60,6 +70,22 @@ public abstract class AbstractDisposableItem extends BaseBehaviourBean
|
||||
{
|
||||
this.dispositionService = dispositionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordService record service
|
||||
*/
|
||||
public void setRecordService(RecordService recordService)
|
||||
{
|
||||
this.recordService = recordService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordFolderService record folder service
|
||||
*/
|
||||
public void setRecordFolderService(RecordFolderService recordFolderService)
|
||||
{
|
||||
this.recordFolderService = recordFolderService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes unwanted aspects
|
||||
@@ -86,4 +112,35 @@ public abstract class AbstractDisposableItem extends BaseBehaviourBean
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans and re-initiates the containing records
|
||||
*
|
||||
* @param childAssociationRef
|
||||
*/
|
||||
protected void reinitializeRecordFolder(ChildAssociationRef childAssociationRef)
|
||||
{
|
||||
|
||||
NodeRef newNodeRef = childAssociationRef.getChildRef();
|
||||
|
||||
AuthenticationUtil.runAs(() -> {
|
||||
// clean record folder
|
||||
cleanDisposableItem(nodeService, newNodeRef);
|
||||
|
||||
// re-initialise the record folder
|
||||
recordFolderService.setupRecordFolder(newNodeRef);
|
||||
|
||||
// sort out the child records
|
||||
for (NodeRef record : recordService.getRecords(newNodeRef))
|
||||
{
|
||||
// clean record
|
||||
cleanDisposableItem(nodeService, record);
|
||||
|
||||
// Re-initiate the records in the new folder.
|
||||
recordService.file(record);
|
||||
}
|
||||
|
||||
return null;
|
||||
}, AuthenticationUtil.getSystemUserName());
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -27,13 +27,14 @@
|
||||
|
||||
package org.alfresco.module.org_alfresco_module_rm.model.rma.type;
|
||||
|
||||
import static org.alfresco.model.ContentModel.TYPE_CONTENT;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.BaseBehaviourBean;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.behaviour.AbstractDisposableItem;
|
||||
import org.alfresco.module.org_alfresco_module_rm.security.FilePlanPermissionService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.vital.VitalRecordService;
|
||||
import org.alfresco.repo.copy.CopyBehaviourCallback;
|
||||
@@ -49,6 +50,7 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.namespace.RegexQNamePattern;
|
||||
|
||||
/**
|
||||
* rma:recordCategory behaviour bean
|
||||
@@ -60,9 +62,10 @@ import org.alfresco.service.namespace.QName;
|
||||
(
|
||||
defaultType = "rma:recordCategory"
|
||||
)
|
||||
public class RecordCategoryType extends BaseBehaviourBean
|
||||
public class RecordCategoryType extends AbstractDisposableItem
|
||||
implements NodeServicePolicies.OnCreateChildAssociationPolicy,
|
||||
NodeServicePolicies.OnCreateNodePolicy
|
||||
NodeServicePolicies.OnCreateNodePolicy,
|
||||
NodeServicePolicies.OnMoveNodePolicy
|
||||
{
|
||||
private final static List<QName> ACCEPTED_UNIQUE_CHILD_TYPES = new ArrayList<>();
|
||||
private final static List<QName> ACCEPTED_NON_UNIQUE_CHILD_TYPES = Arrays.asList(TYPE_RECORD_CATEGORY, TYPE_RECORD_FOLDER);
|
||||
@@ -73,9 +76,6 @@ public class RecordCategoryType extends BaseBehaviourBean
|
||||
/** file plan permission service */
|
||||
protected FilePlanPermissionService filePlanPermissionService;
|
||||
|
||||
/** record folder service */
|
||||
private RecordFolderService recordFolderService;
|
||||
|
||||
/**
|
||||
* @param vitalRecordService vital record service
|
||||
*/
|
||||
@@ -92,14 +92,6 @@ public class RecordCategoryType extends BaseBehaviourBean
|
||||
this.filePlanPermissionService = filePlanPermissionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordFolderService record folder service
|
||||
*/
|
||||
public void setRecordFolderService(RecordFolderService recordFolderService)
|
||||
{
|
||||
this.recordFolderService = recordFolderService;
|
||||
}
|
||||
|
||||
/**
|
||||
* On every event
|
||||
*
|
||||
@@ -204,6 +196,53 @@ public class RecordCategoryType extends BaseBehaviourBean
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Record Category move behaviour
|
||||
*
|
||||
* @see org.alfresco.repo.node.NodeServicePolicies.OnMoveNodePolicy#onMoveNode(org.alfresco.service.cmr.repository.ChildAssociationRef, org.alfresco.service.cmr.repository.ChildAssociationRef)
|
||||
*/
|
||||
@Override
|
||||
@Behaviour
|
||||
(
|
||||
kind = BehaviourKind.CLASS,
|
||||
notificationFrequency = NotificationFrequency.FIRST_EVENT
|
||||
)
|
||||
public void onMoveNode(ChildAssociationRef oldChildAssocRef, ChildAssociationRef newChildAssocRef)
|
||||
{
|
||||
// clean the child folders and records only if the old parent category has a disposition schedule set
|
||||
// if it doesn't, then there are no old properties on the child nodes that have to be cleaned in order
|
||||
// for new ones to be set
|
||||
if (nodeService.getType(newChildAssocRef.getChildRef()).equals(TYPE_RECORD_CATEGORY)
|
||||
&& dispositionService.getDispositionSchedule(oldChildAssocRef.getParentRef()) != null)
|
||||
{
|
||||
reinitializeRecordFolders(newChildAssocRef);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively reinitialize each folder in a structure of categories
|
||||
* Unwanted aspects will be removed from the child records and the records will be re-filed
|
||||
* Disposition schedule aspects and properties will be inherited from the new parent category
|
||||
*
|
||||
* @param childAssociationRef
|
||||
*/
|
||||
private void reinitializeRecordFolders(ChildAssociationRef childAssociationRef)
|
||||
{
|
||||
for (ChildAssociationRef newChildRef : nodeService.getChildAssocs(childAssociationRef.getChildRef(),
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
RegexQNamePattern.MATCH_ALL))
|
||||
{
|
||||
if (nodeService.getType(newChildRef.getChildRef()).equals(TYPE_RECORD_CATEGORY))
|
||||
{
|
||||
reinitializeRecordFolders(newChildRef);
|
||||
}
|
||||
else if (!nodeService.getType(newChildRef.getChildRef()).equals(TYPE_CONTENT))
|
||||
{
|
||||
reinitializeRecordFolder(newChildRef);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy callback for record category
|
||||
*/
|
||||
|
@@ -34,8 +34,6 @@ import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.identifier.IdentifierService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.model.behaviour.AbstractDisposableItem;
|
||||
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.vital.VitalRecordService;
|
||||
import org.alfresco.repo.copy.CopyBehaviourCallback;
|
||||
import org.alfresco.repo.copy.CopyDetails;
|
||||
@@ -68,11 +66,6 @@ public class RecordFolderType extends AbstractDisposableItem
|
||||
implements NodeServicePolicies.OnMoveNodePolicy,
|
||||
NodeServicePolicies.OnCreateChildAssociationPolicy
|
||||
{
|
||||
/** record service */
|
||||
private RecordService recordService;
|
||||
|
||||
/** record folder service */
|
||||
private RecordFolderService recordFolderService;
|
||||
|
||||
/** vital record service */
|
||||
protected VitalRecordService vitalRecordService;
|
||||
@@ -85,22 +78,6 @@ public class RecordFolderType extends AbstractDisposableItem
|
||||
|
||||
private static final String MSG_CANNOT_CREATE_CHILDREN_IN_CLOSED_RECORD_FOLDER = "rm.service.add-children-to-closed-record-folder";
|
||||
|
||||
/**
|
||||
* @param recordService record service
|
||||
*/
|
||||
public void setRecordService(RecordService recordService)
|
||||
{
|
||||
this.recordService = recordService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param recordFolderService record folder service
|
||||
*/
|
||||
public void setRecordFolderService(RecordFolderService recordFolderService)
|
||||
{
|
||||
this.recordFolderService = recordFolderService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param vitalRecordService vital record service
|
||||
*/
|
||||
@@ -131,31 +108,7 @@ public class RecordFolderType extends AbstractDisposableItem
|
||||
{
|
||||
if (!oldChildAssocRef.getParentRef().equals(newChildAssocRef.getParentRef()))
|
||||
{
|
||||
final NodeRef newNodeRef = newChildAssocRef.getChildRef();
|
||||
|
||||
AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<Object>()
|
||||
{
|
||||
public Object doWork()
|
||||
{
|
||||
// clean record folder
|
||||
cleanDisposableItem(nodeService, newNodeRef);
|
||||
|
||||
// re-initialise the record folder
|
||||
recordFolderService.setupRecordFolder(newNodeRef);
|
||||
|
||||
// sort out the child records
|
||||
for (NodeRef record : recordService.getRecords(newNodeRef))
|
||||
{
|
||||
// clean record
|
||||
cleanDisposableItem(nodeService, record);
|
||||
|
||||
// Re-initiate the records in the new folder.
|
||||
recordService.file(record);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}, AuthenticationUtil.getSystemUserName());
|
||||
reinitializeRecordFolder(newChildAssocRef);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@@ -28,6 +28,8 @@ package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition;
|
||||
|
||||
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_DESCRIPTION;
|
||||
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_INSTRUCTIONS;
|
||||
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_EVENT_NAME;
|
||||
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.SEPARATION_EVENT_NAME;
|
||||
import static org.alfresco.util.GUID.generate;
|
||||
|
||||
import java.io.Serializable;
|
||||
@@ -37,6 +39,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction;
|
||||
import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction;
|
||||
import org.alfresco.module.org_alfresco_module_rm.action.impl.RetainAction;
|
||||
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
|
||||
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
|
||||
@@ -190,6 +193,162 @@ public class DispositionScheduleInheritanceTest extends BaseRMTestCase
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a root record category A with a retention schedule set to retain and destroy after 1 day
|
||||
* and another root record category B with a retention schedule set to cut off and destroy after 1 day containing a
|
||||
* subcategory
|
||||
* When moving the subcategory into the first root category
|
||||
* Then records under the subcategory inherit the retention schedule of the parent record category
|
||||
* The events list contain the retain event step inherited from the new parent category
|
||||
* <p>
|
||||
* Please see https://alfresco.atlassian.net/browse/APPS-1004
|
||||
*/
|
||||
public void testRetentionScheduleInheritance_APPS_1004()
|
||||
{
|
||||
doBehaviourDrivenTest(new BehaviourDrivenTest()
|
||||
{
|
||||
NodeRef category1;
|
||||
NodeRef subcategory2;
|
||||
NodeRef record;
|
||||
Date asOfDateBeforeMove;
|
||||
|
||||
@Override
|
||||
public void given()
|
||||
{
|
||||
// create root category1
|
||||
category1 = filePlanService.createRecordCategory(filePlan, generate());
|
||||
|
||||
// create record level disposition schedule for category1
|
||||
createDispositionScheduleRetainAndCutOffOneDay(category1);
|
||||
|
||||
// create root category2
|
||||
NodeRef category2 = filePlanService.createRecordCategory(filePlan, generate());
|
||||
|
||||
// create record level disposition schedule for category2
|
||||
createDispositionScheduleCutOffAndDestroyOneDay(category2);
|
||||
|
||||
// create subcategory2 under category2
|
||||
subcategory2 = filePlanService.createRecordCategory(category2, generate());
|
||||
|
||||
// create folder under subcategory2
|
||||
folder = recordFolderService.createRecordFolder(subcategory2, generate());
|
||||
|
||||
// file record in folder and complete it
|
||||
record = utils.createRecord(folder, generate(), generate());
|
||||
utils.completeRecord(record);
|
||||
|
||||
//store the date to check if it was updated
|
||||
asOfDateBeforeMove = dispositionService.getNextDispositionAction(record).getAsOfDate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void when() throws Exception
|
||||
{
|
||||
// move subcategory2 under category1
|
||||
fileFolderService.move(subcategory2, category1, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void then() throws Exception
|
||||
{
|
||||
dispositionService.getDispositionSchedule(record);
|
||||
// check the next disposition action
|
||||
DispositionAction dispositionActionAfterMove = dispositionService.getNextDispositionAction(record);
|
||||
assertNotNull(dispositionActionAfterMove);
|
||||
assertEquals(RetainAction.NAME, dispositionActionAfterMove.getName());
|
||||
assertNotNull(dispositionActionAfterMove.getAsOfDate());
|
||||
assertTrue(dispositionActionAfterMove.getAsOfDate().after(asOfDateBeforeMove));
|
||||
|
||||
// check the search aspect details
|
||||
assertTrue(nodeService.hasAspect(record, ASPECT_RM_SEARCH));
|
||||
assertEquals(RetainAction.NAME, nodeService.getProperty(record, PROP_RS_DISPOSITION_ACTION_NAME));
|
||||
assertNotNull(nodeService.getProperty(record, PROP_RS_DISPOSITION_ACTION_AS_OF));
|
||||
assertNull((List<String>) nodeService.getProperty(record, PROP_RS_DISPOSITION_EVENTS));
|
||||
assertNotNull(nodeService.getProperty(record, PROP_RS_DISPOITION_INSTRUCTIONS));
|
||||
assertNotNull(nodeService.getProperty(record, PROP_RS_DISPOITION_AUTHORITY));
|
||||
assertTrue((Boolean) nodeService.getProperty(record, PROP_RS_HAS_DISPOITION_SCHEDULE));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a root record category A with a retention schedule set to cut off on event 'case closed'
|
||||
* and another root record category B with a retention schedule set to cut off on event 'separation'
|
||||
* When moving the subcategory into the first root category
|
||||
* Then records under the subcategory inherit the retention schedule of the parent record category
|
||||
* The events list contain the case closed event step inherited from the new parent category
|
||||
* <p>
|
||||
* Please see https://alfresco.atlassian.net/browse/APPS-1005
|
||||
*/
|
||||
public void testRetentionScheduleInheritance_APPS_1005()
|
||||
{
|
||||
doBehaviourDrivenTest(new BehaviourDrivenTest()
|
||||
{
|
||||
NodeRef category1;
|
||||
NodeRef subcategory2;
|
||||
NodeRef record;
|
||||
Date asOfDateBeforeMove;
|
||||
|
||||
@Override
|
||||
public void given()
|
||||
{
|
||||
// create root category1
|
||||
category1 = filePlanService.createRecordCategory(filePlan, generate());
|
||||
|
||||
utils.createDispositionSchedule(category1, DEFAULT_DISPOSITION_INSTRUCTIONS,
|
||||
DEFAULT_DISPOSITION_DESCRIPTION, true, true, false, DEFAULT_EVENT_NAME);
|
||||
|
||||
// create root category2
|
||||
NodeRef category2 = filePlanService.createRecordCategory(filePlan, generate());
|
||||
|
||||
// create record level disposition schedule for category2
|
||||
utils.createDispositionSchedule(category2, DEFAULT_DISPOSITION_INSTRUCTIONS,
|
||||
DEFAULT_DISPOSITION_DESCRIPTION, true, true, false, SEPARATION_EVENT_NAME);
|
||||
|
||||
// create subcategory2 under category2
|
||||
subcategory2 = filePlanService.createRecordCategory(category2, generate());
|
||||
|
||||
// create folder under subcategory2
|
||||
folder = recordFolderService.createRecordFolder(subcategory2, generate());
|
||||
|
||||
// file record in folder and complete it
|
||||
record = utils.createRecord(folder, generate(), generate());
|
||||
utils.completeRecord(record);
|
||||
|
||||
//store the date to check if it was updated
|
||||
asOfDateBeforeMove = dispositionService.getNextDispositionAction(record).getAsOfDate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void when() throws Exception
|
||||
{
|
||||
// move subcategory2 under category1
|
||||
fileFolderService.move(subcategory2, category1, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void then() throws Exception
|
||||
{
|
||||
// check the next disposition action
|
||||
DispositionAction dispositionActionAfterMove = dispositionService.getNextDispositionAction(record);
|
||||
assertNotNull(dispositionActionAfterMove);
|
||||
assertEquals(CutOffAction.NAME, dispositionActionAfterMove.getName());
|
||||
|
||||
// check the search aspect details
|
||||
assertTrue(nodeService.hasAspect(record, ASPECT_RM_SEARCH));
|
||||
assertEquals(CutOffAction.NAME, nodeService.getProperty(record, PROP_RS_DISPOSITION_ACTION_NAME));
|
||||
assertNotNull((List<String>) nodeService.getProperty(record, PROP_RS_DISPOSITION_EVENTS));
|
||||
assertEquals(((List<String>) ((List<String>) nodeService.getProperty(record,
|
||||
PROP_RS_DISPOSITION_EVENTS))).size(), 1);
|
||||
assertEquals(DEFAULT_EVENT_NAME, ((List<String>) ((List<String>) nodeService.getProperty(record,
|
||||
PROP_RS_DISPOSITION_EVENTS))).get(0));
|
||||
assertNotNull(nodeService.getProperty(record, PROP_RS_DISPOITION_INSTRUCTIONS));
|
||||
assertNotNull(nodeService.getProperty(record, PROP_RS_DISPOITION_AUTHORITY));
|
||||
assertTrue((Boolean) nodeService.getProperty(record, PROP_RS_HAS_DISPOITION_SCHEDULE));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void createDispositionScheduleCutOff(NodeRef category, String action, String period)
|
||||
{
|
||||
DispositionSchedule ds = utils.createDispositionSchedule(category, DEFAULT_DISPOSITION_INSTRUCTIONS, DEFAULT_DISPOSITION_DESCRIPTION, true, false, false);
|
||||
@@ -205,6 +364,22 @@ public class DispositionScheduleInheritanceTest extends BaseRMTestCase
|
||||
createDispositionScheduleStep(ds, RetainAction.NAME, CommonRMTestUtils.PERIOD_IMMEDIATELY);
|
||||
}
|
||||
|
||||
private void createDispositionScheduleRetainAndCutOffOneDay(NodeRef category)
|
||||
{
|
||||
DispositionSchedule ds = utils.createDispositionSchedule(category, DEFAULT_DISPOSITION_INSTRUCTIONS, DEFAULT_DISPOSITION_DESCRIPTION, true, false, false);
|
||||
|
||||
createDispositionScheduleStep(ds, RetainAction.NAME, CommonRMTestUtils.PERIOD_ONE_DAY);
|
||||
createDispositionScheduleStep(ds, DestroyAction.NAME, CommonRMTestUtils.PERIOD_ONE_DAY);
|
||||
}
|
||||
|
||||
private void createDispositionScheduleCutOffAndDestroyOneDay(NodeRef category)
|
||||
{
|
||||
DispositionSchedule ds = utils.createDispositionSchedule(category, DEFAULT_DISPOSITION_INSTRUCTIONS, DEFAULT_DISPOSITION_DESCRIPTION, true, false, false);
|
||||
|
||||
createDispositionScheduleStep(ds, CutOffAction.NAME, CommonRMTestUtils.PERIOD_ONE_DAY);
|
||||
createDispositionScheduleStep(ds, DestroyAction.NAME, CommonRMTestUtils.PERIOD_ONE_DAY);
|
||||
}
|
||||
|
||||
private void createDispositionScheduleStep(DispositionSchedule ds, String action, String period)
|
||||
{
|
||||
Map<QName, Serializable> step = new HashMap<QName, Serializable>(3);
|
||||
|
@@ -53,7 +53,7 @@ public class FreezeServiceImplTest extends BaseRMTestCase
|
||||
|
||||
/**
|
||||
* Test freeze service methods.
|
||||
*
|
||||
*
|
||||
* @deprecated as of 2.2
|
||||
*/
|
||||
public void testFreezeService() throws Exception
|
||||
@@ -219,7 +219,7 @@ public class FreezeServiceImplTest extends BaseRMTestCase
|
||||
// hold is not automatically removed
|
||||
holdAssocs = holdService.getHolds(filePlan);
|
||||
assertEquals(1, holdAssocs.size());
|
||||
|
||||
|
||||
// delete hold
|
||||
holdService.deleteHold(holdNodeRef);
|
||||
|
||||
@@ -265,8 +265,38 @@ public class FreezeServiceImplTest extends BaseRMTestCase
|
||||
assertFalse(freezeService.isFrozen(recordFour));
|
||||
// assertFalse(freezeService.hasFrozenChildren(rmFolder));
|
||||
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
@Override
|
||||
public Void run() throws Exception
|
||||
{
|
||||
NodeRef hold101 = holdService.createHold(filePlan, "freezename 103", "FreezeReason", null);
|
||||
// Freeze a record folder
|
||||
assertNotNull(hold101);
|
||||
holdService.addToHold(hold101, rmFolder);
|
||||
assertTrue(recordFolderService.isRecordFolder(rmFolder));
|
||||
assertTrue(freezeService.isFrozenOrHasFrozenChildren(rmFolder));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
@Override
|
||||
public Void run() throws Exception
|
||||
{
|
||||
NodeRef hold101 = holdService.createHold(filePlan, "freezename 104", "FreezeReason", null);
|
||||
// Freeze a record inside a record folder
|
||||
assertNotNull(hold101);
|
||||
holdService.addToHold(hold101, recordThree);
|
||||
assertTrue(recordService.isRecord(recordThree));
|
||||
assertTrue(freezeService.isFrozenOrHasFrozenChildren(rmFolder));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -82,8 +82,10 @@ public class CommonRMTestUtils implements RecordsManagementModel
|
||||
public static final String DEFAULT_DISPOSITION_INSTRUCTIONS = "disposition instructions";
|
||||
public static final String DEFAULT_DISPOSITION_DESCRIPTION = "disposition action description";
|
||||
public static final String DEFAULT_EVENT_NAME = "case_closed";
|
||||
public static final String SEPARATION_EVENT_NAME = "separation";
|
||||
public static final String PERIOD_NONE = "none|0";
|
||||
public static final String PERIOD_IMMEDIATELY = "immediately|0";
|
||||
public static final String PERIOD_ONE_DAY = "day|1";
|
||||
public static final String PERIOD_FIVE_DAYS = "day|5";
|
||||
public static final String PERIOD_TEN_DAYS = "day|10";
|
||||
public static final String PERIOD_ONE_WEEK = "week|1";
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<build>
|
||||
|
amps/module-info.java (new file, 5 lines)
@@ -0,0 +1,5 @@
module simple.lombok {
requires static lombok;
requires java.logging;

}
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-amps</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -26,7 +26,6 @@
|
||||
package org.alfresco.repo.content;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.service.cmr.repository.ContentReader;
|
||||
|
||||
@@ -50,7 +49,6 @@ public class ContentContext implements Serializable
|
||||
|
||||
private ContentReader existingContentReader;
|
||||
private String contentUrl;
|
||||
private Set<String> storageClasses;
|
||||
|
||||
/**
|
||||
* Construct the instance with the content URL.
|
||||
@@ -63,20 +61,7 @@ public class ContentContext implements Serializable
|
||||
this.existingContentReader = existingContentReader;
|
||||
this.contentUrl = contentUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct the instance with the content URL.
|
||||
*
|
||||
* @param existingContentReader content with which to seed the new writer - may be <tt>null</tt>
|
||||
* @param contentUrl the content URL - may be <tt>null</tt>
|
||||
* @param storageClasses the storage classes specific to the provided content URL - may be <tt>null</tt>
|
||||
*/
|
||||
public ContentContext(ContentReader existingContentReader, String contentUrl, Set<String> storageClasses)
|
||||
{
|
||||
this(existingContentReader, contentUrl);
|
||||
this.storageClasses = storageClasses;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
@@ -103,22 +88,5 @@ public class ContentContext implements Serializable
|
||||
{
|
||||
return contentUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Returns the storage classes for the content- may be <tt>null</tt>
|
||||
*/
|
||||
public Set<String> getStorageClasses()
|
||||
{
|
||||
return storageClasses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the storage classes for the content- may be <tt>null</tt>
|
||||
*
|
||||
* @param storageClasses
|
||||
*/
|
||||
public void setStorageClasses(Set<String> storageClasses)
|
||||
{
|
||||
this.storageClasses = storageClasses;
|
||||
}
|
||||
|
||||
}
|
||||
|
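With the storage-class field, constructor and accessors removed above, ContentContext again carries only the optional seed reader and the content URL. A minimal sketch of constructing one after this change; the URL value is made up and the class name is illustrative:

```java
import org.alfresco.repo.content.ContentContext;

// Illustrative only: exercises the surviving two-argument constructor.
public class ContentContextSketch
{
    public static void main(String[] args)
    {
        // null seed reader, explicit content URL; storage classes no longer apply here
        ContentContext ctx = new ContentContext(null, "store://2021/09/01/example.bin");
        System.out.println(ctx); // toString() prints the context details
    }
}
```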
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -25,11 +25,6 @@
|
||||
*/
|
||||
package org.alfresco.repo.content;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.api.AlfrescoPublicApi;
|
||||
import org.alfresco.service.cmr.repository.ContentAccessor;
|
||||
import org.alfresco.service.cmr.repository.ContentIOException;
|
||||
@@ -38,6 +33,7 @@ import org.alfresco.service.cmr.repository.ContentStreamListener;
|
||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
|
||||
|
||||
/**
|
||||
* Provides low-level retrieval of content
|
||||
* {@link org.alfresco.service.cmr.repository.ContentReader readers} and
|
||||
@@ -91,23 +87,7 @@ public interface ContentStore
|
||||
* The delimiter that must be found in all URLS, i.e <b>://</b>
|
||||
*/
|
||||
public static final String PROTOCOL_DELIMITER = "://";
|
||||
|
||||
public static final String STORAGE_CLASS_DEFAULT = "default";
|
||||
public static final String STORAGE_CLASS_ARCHIVE = "archive";
|
||||
public static final String STORAGE_CLASS_WORM = "worm";
|
||||
|
||||
/**
|
||||
* The 'default' storage class
|
||||
*
|
||||
* A content is considered to have a default storage class if:
|
||||
* the value is a Set.of("default")
|
||||
* the value is an empty set
|
||||
* the value is null
|
||||
*/
|
||||
public static final StorageClassSet SCS_DEFAULT = new StorageClassSet(STORAGE_CLASS_DEFAULT);
|
||||
public static final StorageClassSet SCS_ARCHIVE = new StorageClassSet(STORAGE_CLASS_ARCHIVE);
|
||||
public static final StorageClassSet SCS_WORM = new StorageClassSet(STORAGE_CLASS_WORM);
|
||||
|
||||
|
||||
/**
|
||||
* Check if the content URL format is supported by the store.
|
||||
*
|
||||
@@ -258,89 +238,56 @@ public interface ContentStore
|
||||
*/
|
||||
public boolean delete(String contentUrl);
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access a binary content. It is up to the actual store
|
||||
* implementation if it can fulfil this request with an expiry time or not.
|
||||
*
|
||||
* @param contentUrl A content store URL
|
||||
* @param expiresAt An optional expiry date, so the direct access url would become invalid when the expiry date is reached
|
||||
* @return A direct access URL object for a binary content
|
||||
* @throws UnsupportedOperationException if the store is unable to provide the information
|
||||
*/
|
||||
default DirectAccessUrl getDirectAccessUrl(String contentUrl, Date expiresAt)
|
||||
{
|
||||
throw new UnsupportedOperationException(
|
||||
"Retrieving direct access URLs is not supported by this content store.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the store supports the retrieving of direct access URLs.
|
||||
*
|
||||
* @return true if direct access URLs retrieving is supported, false otherwise
|
||||
* @return {@code true} if direct access URLs retrieving is supported, {@code false} otherwise
|
||||
*/
|
||||
default boolean isDirectAccessSupported()
|
||||
default boolean isContentDirectUrlEnabled()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether or not the current {@link ContentStore} supports the provided {@link Set} storage classes
|
||||
* Checks if the store supports the retrieving of a direct access URL for the given node.
|
||||
*
|
||||
* @param storageClassSet The storage classes that will be checked whether or not are supported
|
||||
* @return true if the storage classes are supported, false otherwise.
|
||||
* @param contentUrl the {@code URL} of the content for which to request a direct access {@code URL}
|
||||
* @return {@code true} if direct access URLs retrieving is supported for the node, {@code false} otherwise
|
||||
*/
|
||||
default boolean isStorageClassesSupported(StorageClassSet storageClassSet)
|
||||
default boolean isContentDirectUrlEnabled(String contentUrl)
|
||||
{
|
||||
return storageClassSet == null ||
|
||||
storageClassSet.isEmpty() ||
|
||||
(1 == storageClassSet.size() && storageClassSet.equals(SCS_DEFAULT));
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Returns the complete {@link Set} of supported storage classes by this {@link ContentStore}
|
||||
*/
|
||||
default Set<String> getSupportedStorageClasses()
|
||||
{
|
||||
return Set.of(ContentStore.STORAGE_CLASS_DEFAULT);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Updates the storage class for content
|
||||
* Gets a presigned URL to directly access the content. It is up to the actual store
|
||||
* implementation if it can fulfil this request with an expiry time or not.
|
||||
*
|
||||
* @param contentUrl The URL of the content that will have its storage classes updated
|
||||
* @param storageClassSet The new storage class
|
||||
* @param parameters extra parameters
|
||||
* @param contentUrl A content store {@code URL}
|
||||
* @param attachment {@code true} if an attachment URL is requested, {@code false} for an embedded {@code URL}.
|
||||
* @param fileName File name of the content
|
||||
* @return A direct access {@code URL} object for the content
|
||||
* @throws UnsupportedOperationException if the store is unable to provide the information
|
||||
*/
|
||||
default void updateStorageClasses(String contentUrl, StorageClassSet storageClassSet, Map<String, Object> parameters)
|
||||
default DirectAccessUrl requestContentDirectUrl(String contentUrl, boolean attachment, String fileName)
|
||||
{
|
||||
|
||||
return requestContentDirectUrl(contentUrl, attachment, fileName, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param contentUrl the URL of the content for which the storage classes are to be requested
|
||||
* @return Returns the current storage classes for the content found at the contentUrl
|
||||
* Gets a presigned URL to directly access the content. It is up to the actual store
|
||||
* implementation if it can fulfil this request with an expiry time or not.
|
||||
*
|
||||
* @param contentUrl A content store {@code URL}
|
||||
* @param attachment {@code true} if an attachment URL is requested, {@code false} for an embedded {@code URL}.
|
||||
* @param fileName File name of the content
|
||||
* @param validFor The time at which the direct access {@code URL} will expire.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
* @throws UnsupportedOperationException if the store is unable to provide the information
|
||||
*/
|
||||
default StorageClassSet findStorageClasses(String contentUrl)
|
||||
default DirectAccessUrl requestContentDirectUrl(String contentUrl, boolean attachment, String fileName, Long validFor)
|
||||
{
|
||||
return SCS_DEFAULT;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Returns the complete collection of allowed storage classes transitions.
|
||||
* The key represents the source storage classes while the value (as a {@link Set}) represents all the possible target storage classes.
|
||||
*/
|
||||
default Map<StorageClassSet, Set<StorageClassSet>> getStorageClassesTransitions()
|
||||
{
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param contentUrl the URL of the content for which the storage classes transitions are to be requested
|
||||
* @return Returns the complete collection of allowed storage classes transitions for the content found at content URL
|
||||
*/
|
||||
default Map<StorageClassSet, Set<StorageClassSet>> findStorageClassesTransitions(String contentUrl)
|
||||
{
|
||||
return Collections.emptyMap();
|
||||
throw new UnsupportedOperationException(
|
||||
"Retrieving direct access URLs is not supported by this content store.");
|
||||
}
|
||||
}
|
||||
|
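To make the reworked ContentStore contract above easier to follow, here is a caller-side sketch that uses only the default methods shown in the diff. It is not part of the changeset: the store instance, file name and validity value are assumptions, and the unit of validFor is not stated in the excerpt.

```java
import org.alfresco.repo.content.ContentStore;
import org.alfresco.service.cmr.repository.DirectAccessUrl;

public class DirectUrlCallerSketch
{
    // Returns a pre-signed URL for the given content, or null if the store cannot provide one.
    public static DirectAccessUrl directUrlFor(ContentStore store, String contentUrl)
    {
        // The default implementation returns false, so callers probe before requesting.
        if (!store.isContentDirectUrlEnabled(contentUrl))
        {
            return null;
        }
        // Attachment-style URL; "report.pdf" and the validFor value are illustrative.
        return store.requestContentDirectUrl(contentUrl, true, "report.pdf", 30L);
    }
}
```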
@@ -1,58 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.content;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
|
||||
/**
|
||||
* Thrown when an operation regarding the storage classes of the content failed to execute.
|
||||
*
|
||||
* @author Lucian Tuca
|
||||
*/
|
||||
public class UnsupportedStorageClassException extends AlfrescoRuntimeException
|
||||
{
|
||||
private final ContentStore contentStore;
|
||||
private final Set<String> storageClasses;
|
||||
|
||||
public UnsupportedStorageClassException(ContentStore contentStore, Set<String> storageClasses, String msg)
|
||||
{
|
||||
super(msg);
|
||||
this.contentStore = contentStore;
|
||||
this.storageClasses = storageClasses;
|
||||
}
|
||||
|
||||
public ContentStore getContentStore()
|
||||
{
|
||||
return contentStore;
|
||||
}
|
||||
|
||||
public Set<String> getStorageClasses()
|
||||
{
|
||||
return storageClasses;
|
||||
}
|
||||
}
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Data model classes
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -27,6 +27,7 @@ package org.alfresco.service.cmr.repository;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.alfresco.api.AlfrescoPublicApi;
|
||||
|
||||
@@ -36,7 +37,8 @@ public class DirectAccessUrl implements Serializable
|
||||
private static final long serialVersionUID = -881676208224414139L;
|
||||
|
||||
private String contentUrl;
|
||||
private Date expiresAt;
|
||||
private Date expiryTime;
|
||||
private boolean attachment;
|
||||
|
||||
public String getContentUrl()
|
||||
{
|
||||
@@ -48,13 +50,38 @@ public class DirectAccessUrl implements Serializable
|
||||
this.contentUrl = contentUrl;
|
||||
}
|
||||
|
||||
public Date getExpiresAt()
|
||||
public Date getExpiryTime()
|
||||
{
|
||||
return expiresAt;
|
||||
return expiryTime;
|
||||
}
|
||||
|
||||
public void setExpiresAt(Date expiresAt)
|
||||
public void setExpiryTime(Date expiryTime)
|
||||
{
|
||||
this.expiresAt = expiresAt;
|
||||
this.expiryTime = expiryTime;
|
||||
}
|
||||
|
||||
public boolean isAttachment()
|
||||
{
|
||||
return attachment;
|
||||
}
|
||||
|
||||
public void setAttachment(boolean attachment)
|
||||
{
|
||||
this.attachment = attachment;
|
||||
}
|
||||
|
||||
@Override public boolean equals(Object obj)
|
||||
{
|
||||
if (this == obj) return true;
|
||||
if (obj == null || getClass() != obj.getClass()) return false;
|
||||
|
||||
DirectAccessUrl that = (DirectAccessUrl) obj;
|
||||
return attachment == that.attachment && Objects.equals(contentUrl,
|
||||
that.contentUrl) && Objects.equals(expiryTime, that.expiryTime);
|
||||
}
|
||||
|
||||
@Override public int hashCode()
|
||||
{
|
||||
return Objects.hash(contentUrl, expiryTime, attachment);
|
||||
}
|
||||
}
|
||||
|
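A small sketch of how a store implementation might populate the renamed expiryTime field and the new attachment flag. It assumes DirectAccessUrl keeps an implicit no-argument constructor and a setContentUrl setter, neither of which is shown in full in this excerpt.

```java
import java.util.Date;

import org.alfresco.service.cmr.repository.DirectAccessUrl;

public class DirectAccessUrlSketch
{
    // Illustrative only: builds a DirectAccessUrl for a pre-signed URL valid for the given time.
    public static DirectAccessUrl build(String presignedUrl, long validForMillis)
    {
        DirectAccessUrl dau = new DirectAccessUrl();
        dau.setContentUrl(presignedUrl);
        dau.setExpiryTime(new Date(System.currentTimeMillis() + validForMillis)); // was setExpiresAt
        dau.setAttachment(true); // new flag, also part of the equals/hashCode added above
        return dau;
    }
}
```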
@@ -9,6 +9,6 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
</project>
|
||||
|
54 packaging/distribution/src/main/resources/licenses/3rd-party/BSDlike-JDOM.txt vendored Normal file
@@ -0,0 +1,54 @@
|
||||
/*--
|
||||
|
||||
Copyright (C) 2000-2012 Jason Hunter & Brett McLaughlin.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions, and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions, and the disclaimer that follows
|
||||
these conditions in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
3. The name "JDOM" must not be used to endorse or promote products
|
||||
derived from this software without prior written permission. For
|
||||
written permission, please contact <request_AT_jdom_DOT_org>.
|
||||
|
||||
4. Products derived from this software may not be called "JDOM", nor
|
||||
may "JDOM" appear in their name, without prior written permission
|
||||
from the JDOM Project Management <request_AT_jdom_DOT_org>.
|
||||
|
||||
In addition, we request (but do not require) that you include in the
|
||||
end-user documentation provided with the redistribution and/or in the
|
||||
software itself an acknowledgement equivalent to the following:
|
||||
"This product includes software developed by the
|
||||
JDOM Project (http://www.jdom.org/)."
|
||||
Alternatively, the acknowledgment may be graphical using the logos
|
||||
available at http://www.jdom.org/images/logos.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE JDOM AUTHORS OR THE PROJECT
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
||||
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
|
||||
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
This software consists of voluntary contributions made by many
|
||||
individuals on behalf of the JDOM Project and was originally
|
||||
created by Jason Hunter <jhunter_AT_jdom_DOT_org> and
|
||||
Brett McLaughlin <brett_AT_jdom_DOT_org>. For more information
|
||||
on the JDOM Project, please see <http://www.jdom.org/>.
|
||||
|
||||
*/
|
||||
|
46 packaging/distribution/src/main/resources/licenses/3rd-party/BSDlike-XPP3.txt vendored Normal file
@@ -0,0 +1,46 @@
|
||||
Indiana University Extreme! Lab Software License
|
||||
|
||||
Version 1.1.1
|
||||
|
||||
Copyright (c) 2002 Extreme! Lab, Indiana University. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in
|
||||
the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. The end-user documentation included with the redistribution, if any,
|
||||
must include the following acknowledgment:
|
||||
|
||||
"This product includes software developed by the Indiana University
|
||||
Extreme! Lab (http://www.extreme.indiana.edu/)."
|
||||
|
||||
Alternately, this acknowledgment may appear in the software itself,
|
||||
if and wherever such third-party acknowledgments normally appear.
|
||||
|
||||
4. The names "Indiana Univeristy" and "Indiana Univeristy Extreme! Lab"
|
||||
must not be used to endorse or promote products derived from this
|
||||
software without prior written permission. For written permission,
|
||||
please contact http://www.extreme.indiana.edu/.
|
||||
|
||||
5. Products derived from this software may not use "Indiana Univeristy"
|
||||
name nor may "Indiana Univeristy" appear in their name, without prior
|
||||
written permission of the Indiana University.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESSED OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHORS, COPYRIGHT HOLDERS OR ITS CONTRIBUTORS
|
||||
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@@ -1,6 +1,6 @@
|
||||
# Fetch image based on Tomcat 9.0, Java 11 and Centos 8
|
||||
# More info about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
|
||||
FROM alfresco/alfresco-base-tomcat:9.0.45-java-11-centos-8
|
||||
FROM alfresco/alfresco-base-tomcat:9.0.52-java-11-centos-7
|
||||
|
||||
# Set default docker_context.
|
||||
ARG resource_path=target
|
||||
@@ -65,12 +65,12 @@ RUN sed -i -e "s_log4j.appender.File.File\=alfresco.log_log4j.appender.File.File
|
||||
|
||||
# fontconfig is required by the Activiti workflow diagram generator
|
||||
# installing pinned dependencies as well
|
||||
RUN yum install -y fontconfig-2.13.1-3.el8 \
|
||||
dejavu-fonts-common-2.35-7.el8 \
|
||||
fontpackages-filesystem-1.44-22.el8 \
|
||||
freetype-2.9.1-4.el8_3.1 \
|
||||
libpng-1.6.34-5.el8 \
|
||||
dejavu-sans-fonts-2.35-7.el8 && \
|
||||
RUN yum install -y fontconfig-2.13.0-4.3.el7 \
|
||||
dejavu-fonts-common-2.33-6.el7 \
|
||||
fontpackages-filesystem-1.44-8.el7 \
|
||||
freetype-2.8-14.el7_9.1 \
|
||||
libpng-1.5.13-8.el7 \
|
||||
dejavu-sans-fonts-2.33-6.el7 && \
|
||||
yum clean all
|
||||
|
||||
# The standard configuration is to have all Tomcat files owned by root with group GROUPNAME and whilst owner has read/write privileges,
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -1,4 +1,4 @@
|
||||
TRANSFORMERS_TAG=2.5.2
|
||||
TRANSFORMERS_TAG=2.5.3
|
||||
SOLR6_TAG=2.0.2
|
||||
POSTGRES_TAG=13.3
|
||||
ACTIVEMQ_TAG=5.16.1
|
||||
|
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -90,7 +90,7 @@ public class CancelCheckOutTests extends CmisTest
|
||||
.cancelCheckOut();
|
||||
}
|
||||
|
||||
@Test(groups = { TestGroup.REGRESSION, TestGroup.CMIS})
|
||||
@Test(groups = { TestGroup.NOT_SUPPORTED_ON_SINGLE_PIPELINE, TestGroup.REGRESSION, TestGroup.CMIS})
|
||||
@TestRail(section = {"cmis-api"}, executionType= ExecutionType.REGRESSION,
|
||||
description = "Verify that cancel check out on document created with Versioning State CHECKED OUT deletes the document")
|
||||
public void cancelCheckOutOnDocWithVersioningStateCheckedOut() throws Exception
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -1,9 +1,6 @@
|
||||
package org.alfresco.rest.nodes;
|
||||
|
||||
import static org.alfresco.utility.report.log.Step.STEP;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonObject;
|
||||
@@ -21,16 +18,12 @@ import org.alfresco.utility.constants.UserRole;
|
||||
import org.alfresco.utility.data.DataUser;
|
||||
import org.alfresco.utility.model.ContentModel;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.alfresco.utility.model.SiteModel;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.alfresco.utility.report.Bug;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import io.restassured.RestAssured;
|
||||
@@ -40,20 +33,6 @@ import io.restassured.RestAssured;
|
||||
*/
|
||||
public class NodesTests extends RestTest
|
||||
{
|
||||
private UserModel user1;
|
||||
private SiteModel site1;
|
||||
private FolderModel folder1;
|
||||
private FileModel file1;
|
||||
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
user1 = dataUser.createRandomTestUser();
|
||||
site1 = dataSite.usingUser(user1).createPublicRandomSite();
|
||||
folder1 = dataContent.usingUser(user1).usingSite(site1).createFolder();
|
||||
file1 = dataContent.usingUser(user1).usingResource(folder1).createContent(CMISUtil.DocumentType.TEXT_PLAIN);
|
||||
}
|
||||
|
||||
@TestRail(section = { TestGroup.REST_API,TestGroup.NODES }, executionType = ExecutionType.SANITY,
|
||||
description = "Verify files can be moved from one folder to another")
|
||||
@Test(groups = { TestGroup.REST_API, TestGroup.NODES, TestGroup.SANITY})
|
||||
@@ -147,61 +126,4 @@ public class NodesTests extends RestTest
|
||||
logger.info("CMIS API call response status code is: " + cmisApiStatusCode);
|
||||
assertEquals(HttpStatus.FORBIDDEN.value(), cmisApiStatusCode);
|
||||
}
|
||||
|
||||
@TestRail(section = { TestGroup.SANITY }, executionType = ExecutionType.SANITY,
|
||||
description = "Check that the default node storage classes are retrieved - GET /nodes/{nodeId}.")
|
||||
@Test(groups = { TestGroup.SANITY })
|
||||
public void getNodeStorageClass() throws Exception
|
||||
{
|
||||
STEP("1. Get storage classes for a node with content.");
|
||||
RestNodeModel restResponse = restClient.authenticateUser(user1).withCoreAPI().usingNode(file1).usingParams("include=storageClasses").getNode();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
assertTrue(restResponse.getContent().getStorageClasses().contains("default"));
|
||||
|
||||
STEP("2. Check that storage classes for a node with content are not displayed by default.");
|
||||
restResponse = restClient.authenticateUser(user1).withCoreAPI().usingNode(file1).getNode();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
assertNull(restResponse.getContent().getStorageClasses());
|
||||
|
||||
STEP("3. Check that the request for storage classes on a node without content (e.g folder) is gracefully ignored.");
|
||||
restClient.authenticateUser(user1).withCoreAPI().usingNode(folder1).usingParams("include=storageClasses").getNode();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
assertNull(restResponse.getContent().getStorageClasses());
|
||||
}
|
||||
|
||||
@TestRail(section = { TestGroup.SANITY }, executionType = ExecutionType.SANITY,
|
||||
description = "Check that the storage classes default behavior of PUT /nodes/{nodeId}.")
|
||||
@Test(groups = { TestGroup.SANITY })
|
||||
public void updateNodeStorageClass() throws Exception
|
||||
{
|
||||
STEP("1. Update storage classes for a node with existing storage class.");
|
||||
JsonObject updateStorageClass = Json.createObjectBuilder().add("content",
|
||||
Json.createObjectBuilder().add("storageClasses", Json.createArrayBuilder().add("default")))
|
||||
.build();
|
||||
RestNodeModel restResponse = restClient.authenticateUser(user1).withCoreAPI()
|
||||
.usingNode(file1).usingParams("include=storageClasses").updateNode(updateStorageClass.toString());
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
assertTrue(restResponse.getContent().getStorageClasses().contains("default"));
|
||||
|
||||
STEP("2. Update storage classes for a node and check that storageClass is not displayed by default.");
|
||||
// Use existing update body
|
||||
restResponse = restClient.authenticateUser(user1).withCoreAPI()
|
||||
.usingNode(file1).updateNode(updateStorageClass.toString());
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
assertNull(restResponse.getContent().getStorageClasses());
|
||||
}
|
||||
|
||||
@TestRail(section = { TestGroup.SANITY }, executionType = ExecutionType.SANITY,
|
||||
description = "Verify that the BAD_REQUEST is returned when updating storage classes for a node with an invalid storage class")
|
||||
@Test(groups = { TestGroup.SANITY })
|
||||
public void updateNodeStorageClassWithInvalidStorageClassShouldReturn400() throws Exception
|
||||
{
|
||||
STEP("1. Update storage classes for a node with an invalid storage class.");
|
||||
JsonObject updateStorageClass = Json.createObjectBuilder().add("content",
|
||||
Json.createObjectBuilder().add("storageClasses", Json.createArrayBuilder().add("storageClassThatDoesntExist")))
|
||||
.build();
|
||||
RestNodeModel restResponse = restClient.authenticateUser(user1).withCoreAPI()
|
||||
.usingNode(file1).usingParams("include=storageClasses").updateNode(updateStorageClass.toString());
|
||||
restClient.assertStatusCodeIs(HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
@@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-tests</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<developers>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-packaging</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
27 pom.xml
@@ -2,7 +2,7 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
<packaging>pom</packaging>
|
||||
<name>Alfresco Community Repo Parent</name>
|
||||
|
||||
@@ -51,23 +51,23 @@
|
||||
<dependency.alfresco-log-sanitizer.version>0.2</dependency.alfresco-log-sanitizer.version>
|
||||
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
|
||||
<dependency.activiti.version>5.23.0</dependency.activiti.version>
|
||||
<dependency.transform.model.version>1.3.1</dependency.transform.model.version>
|
||||
<dependency.alfresco-transform-model.version>1.4.0</dependency.alfresco-transform-model.version>
|
||||
<dependency.alfresco-greenmail.version>6.2</dependency.alfresco-greenmail.version>
|
||||
<dependency.acs-event-model.version>0.0.12</dependency.acs-event-model.version>
|
||||
<dependency.acs-event-model.version>0.0.13</dependency.acs-event-model.version>
|
||||
|
||||
<dependency.spring.version>5.3.3</dependency.spring.version>
|
||||
<dependency.spring.version>5.3.9</dependency.spring.version>
|
||||
<dependency.antlr.version>3.5.2</dependency.antlr.version>
|
||||
<dependency.jackson.version>2.12.3</dependency.jackson.version>
|
||||
<dependency.jackson-databind.version>2.12.4</dependency.jackson-databind.version>
|
||||
<dependency.cxf.version>3.4.4</dependency.cxf.version>
|
||||
<dependency.opencmis.version>1.0.0</dependency.opencmis.version>
|
||||
<dependency.webscripts.version>8.22</dependency.webscripts.version>
|
||||
<dependency.webscripts.version>8.23</dependency.webscripts.version>
|
||||
<dependency.bouncycastle.version>1.69</dependency.bouncycastle.version>
|
||||
<dependency.mockito-core.version>3.11.2</dependency.mockito-core.version>
|
||||
<dependency.mockito-all.version>1.10.19</dependency.mockito-all.version>
|
||||
<dependency.org-json.version>20210307</dependency.org-json.version>
|
||||
<dependency.commons-dbcp.version>1.4-DBCP330</dependency.commons-dbcp.version>
|
||||
<dependency.commons-io.version>2.8.0</dependency.commons-io.version>
|
||||
<dependency.commons-io.version>2.11.0</dependency.commons-io.version>
|
||||
<dependency.gson.version>2.8.5</dependency.gson.version>
|
||||
<dependency.httpclient.version>4.5.13</dependency.httpclient.version>
|
||||
<dependency.httpcore.version>4.4.14</dependency.httpcore.version>
|
||||
@@ -85,7 +85,7 @@
|
||||
<dependency.jboss.logging.version>3.4.2.Final</dependency.jboss.logging.version>
|
||||
<dependency.camel.version>3.7.4</dependency.camel.version>
|
||||
<dependency.activemq.version>5.16.1</dependency.activemq.version>
|
||||
<dependency.apache-compress.version>1.20</dependency.apache-compress.version>
|
||||
<dependency.apache-compress.version>1.21</dependency.apache-compress.version>
|
||||
<dependency.apache.taglibs.version>1.2.5</dependency.apache.taglibs.version>
|
||||
<dependency.awaitility.version>4.1.0</dependency.awaitility.version>
|
||||
<dependency.swagger-ui.version>3.38.0</dependency.swagger-ui.version>
|
||||
@@ -105,16 +105,16 @@
|
||||
|
||||
<alfresco.googledrive.version>3.2.1.3</alfresco.googledrive.version>
|
||||
<alfresco.aos-module.version>1.4.0.1</alfresco.aos-module.version>
|
||||
<alfresco.api-explorer.version>7.0.0</alfresco.api-explorer.version>
|
||||
<alfresco.api-explorer.version>7.1.0.1</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
|
||||
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
|
||||
|
||||
<dependency.postgresql.version>42.2.20</dependency.postgresql.version>
|
||||
<dependency.mysql.version>8.0.25</dependency.mysql.version>
|
||||
<dependency.mysql-image.version>8</dependency.mysql-image.version>
|
||||
<dependency.mariadb.version>2.7.2</dependency.mariadb.version>
|
||||
<dependency.tas-utility.version>3.0.44</dependency.tas-utility.version>
|
||||
<dependency.tas-utility.version>3.0.45</dependency.tas-utility.version>
|
||||
<dependency.rest-assured.version>3.3.0</dependency.rest-assured.version>
|
||||
<dependency.tas-restapi.version>1.61</dependency.tas-restapi.version>
|
||||
<dependency.tas-restapi.version>1.64</dependency.tas-restapi.version>
|
||||
<dependency.tas-cmis.version>1.30</dependency.tas-cmis.version>
|
||||
<dependency.tas-email.version>1.8</dependency.tas-email.version>
|
||||
<dependency.tas-webdav.version>1.6</dependency.tas-webdav.version>
|
||||
@@ -142,7 +142,7 @@
|
||||
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
|
||||
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
|
||||
<url>https://github.com/Alfresco/alfresco-community-repo</url>
|
||||
<tag>14.1</tag>
|
||||
<tag>14.3</tag>
|
||||
</scm>
|
||||
|
||||
<distributionManagement>
|
||||
@@ -638,7 +638,7 @@
|
||||
<dependency>
|
||||
<groupId>org.jsoup</groupId>
|
||||
<artifactId>jsoup</artifactId>
|
||||
<version>1.13.1</version>
|
||||
<version>1.14.2</version>
|
||||
</dependency>
|
||||
<!-- upgrade dependency from TIKA -->
|
||||
<dependency>
|
||||
@@ -649,7 +649,7 @@
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-csv</artifactId>
|
||||
<version>1.8</version>
|
||||
<version>1.9.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -851,6 +851,7 @@
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.20</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo</artifactId>
|
||||
<version>14.1</version>
|
||||
<version>14.3</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
|
@@ -29,6 +29,7 @@ import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.springframework.extensions.surf.util.Content;
|
||||
import org.springframework.extensions.webscripts.Description.FormatStyle;
|
||||
@@ -38,15 +39,15 @@ import org.springframework.extensions.webscripts.WebScriptRequest;
|
||||
import org.springframework.extensions.webscripts.WrappingWebScriptRequest;
|
||||
import org.springframework.util.FileCopyUtils;
|
||||
|
||||
public class BufferedRequest implements WrappingWebScriptRequest
|
||||
public class BufferedRequest implements WrappingWebScriptRequest, AutoCloseable
|
||||
{
|
||||
private TempOutputStreamFactory streamFactory;
|
||||
private WebScriptRequest req;
|
||||
private final Supplier<TempOutputStream> streamFactory;
|
||||
private final WebScriptRequest req;
|
||||
private TempOutputStream bufferStream;
|
||||
private InputStream contentStream;
|
||||
private BufferedReader contentReader;
|
||||
|
||||
public BufferedRequest(WebScriptRequest req, TempOutputStreamFactory streamFactory)
|
||||
public BufferedRequest(WebScriptRequest req, Supplier<TempOutputStream> streamFactory)
|
||||
{
|
||||
this.req = req;
|
||||
this.streamFactory = streamFactory;
|
||||
@@ -56,7 +57,7 @@ public class BufferedRequest implements WrappingWebScriptRequest
|
||||
{
|
||||
if (bufferStream == null)
|
||||
{
|
||||
bufferStream = streamFactory.createOutputStream();
|
||||
bufferStream = streamFactory.get();
|
||||
|
||||
try
|
||||
{
|
||||
@@ -81,7 +82,7 @@ public class BufferedRequest implements WrappingWebScriptRequest
|
||||
}
|
||||
if (contentStream == null)
|
||||
{
|
||||
contentStream = getBufferedBodyAsTempStream().getInputStream();
|
||||
contentStream = getBufferedBodyAsTempStream().toNewInputStream();
|
||||
}
|
||||
|
||||
return contentStream;
|
||||
@@ -95,7 +96,7 @@ public class BufferedRequest implements WrappingWebScriptRequest
|
||||
{
|
||||
contentStream.close();
|
||||
}
|
||||
catch (Exception e)
|
||||
catch (Exception ignore)
|
||||
{
|
||||
}
|
||||
contentStream = null;
|
||||
@@ -106,13 +107,14 @@ public class BufferedRequest implements WrappingWebScriptRequest
|
||||
{
|
||||
contentReader.close();
|
||||
}
|
||||
catch (Exception e)
|
||||
catch (Exception ignore)
|
||||
{
|
||||
}
|
||||
contentReader = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void close()
|
||||
{
|
||||
reset();
|
||||
@@ -122,7 +124,7 @@ public class BufferedRequest implements WrappingWebScriptRequest
|
||||
{
|
||||
bufferStream.destroy();
|
||||
}
|
||||
catch (Exception e)
|
||||
catch (Exception ignore)
|
||||
{
|
||||
}
|
||||
bufferStream = null;
|
||||
|
@@ -28,6 +28,7 @@ package org.alfresco.repo.web.scripts;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.Writer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.apache.commons.logging.Log;
|
||||
@@ -42,25 +43,24 @@ import org.springframework.util.FileCopyUtils;
|
||||
/**
|
||||
* Transactional Buffered Response
|
||||
*/
|
||||
public class BufferedResponse implements WrappingWebScriptResponse
|
||||
public class BufferedResponse implements WrappingWebScriptResponse, AutoCloseable
|
||||
{
|
||||
// Logger
|
||||
protected static final Log logger = LogFactory.getLog(BufferedResponse.class);
|
||||
|
||||
private TempOutputStreamFactory streamFactory;
|
||||
private WebScriptResponse res;
|
||||
private int bufferSize;
|
||||
private TempOutputStream outputStream = null;
|
||||
private StringBuilderWriter outputWriter = null;
|
||||
|
||||
private final Supplier<TempOutputStream> streamFactory;
|
||||
private final WebScriptResponse res;
|
||||
private final int bufferSize;
|
||||
private TempOutputStream outputStream;
|
||||
private StringBuilderWriter outputWriter;
|
||||
|
||||
/**
|
||||
* Construct
|
||||
*
|
||||
* @param res WebScriptResponse
|
||||
*
|
||||
* @param res WebScriptResponse
|
||||
* @param bufferSize int
|
||||
*/
|
||||
public BufferedResponse(WebScriptResponse res, int bufferSize, TempOutputStreamFactory streamFactory)
|
||||
public BufferedResponse(WebScriptResponse res, int bufferSize, Supplier<TempOutputStream> streamFactory)
|
||||
{
|
||||
this.res = res;
|
||||
this.bufferSize = bufferSize;
|
||||
@@ -71,6 +71,7 @@ public class BufferedResponse implements WrappingWebScriptResponse
|
||||
* (non-Javadoc)
|
||||
* @see org.alfresco.web.scripts.WrappingWebScriptResponse#getNext()
|
||||
*/
|
||||
@Override
|
||||
public WebScriptResponse getNext()
|
||||
{
|
||||
return res;
|
||||
@@ -123,16 +124,18 @@ public class BufferedResponse implements WrappingWebScriptResponse
|
||||
* (non-Javadoc)
|
||||
* @see org.alfresco.web.scripts.WebScriptResponse#getOutputStream()
|
||||
*/
|
||||
@Override
|
||||
public OutputStream getOutputStream() throws IOException
|
||||
{
|
||||
if (outputStream == null)
|
||||
if (outputStream != null)
|
||||
{
|
||||
if (outputWriter != null)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Already buffering output writer");
|
||||
}
|
||||
outputStream = streamFactory.createOutputStream();
|
||||
return outputStream;
|
||||
}
|
||||
if (outputWriter != null)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Already buffering output writer");
|
||||
}
|
||||
outputStream = streamFactory.get();
|
||||
return outputStream;
|
||||
}
|
||||
|
||||
@@ -151,14 +154,15 @@ public class BufferedResponse implements WrappingWebScriptResponse
|
||||
*/
|
||||
public Writer getWriter() throws IOException
|
||||
{
|
||||
if (outputWriter == null)
|
||||
if (outputWriter != null)
|
||||
{
|
||||
if (outputStream != null)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Already buffering output stream");
|
||||
}
|
||||
outputWriter = new StringBuilderWriter(bufferSize);
|
||||
return outputWriter;
|
||||
}
|
||||
if (outputStream != null)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Already buffering output stream");
|
||||
}
|
||||
outputWriter = new StringBuilderWriter(bufferSize);
|
||||
return outputWriter;
|
||||
}
|
||||
|
||||
@@ -262,15 +266,7 @@ public class BufferedResponse implements WrappingWebScriptResponse
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Writing Transactional response: size=" + outputStream.getLength());
|
||||
|
||||
try
|
||||
{
|
||||
outputStream.flush();
|
||||
FileCopyUtils.copy(outputStream.getInputStream(), res.getOutputStream());
|
||||
}
|
||||
finally
|
||||
{
|
||||
outputStream.destroy();
|
||||
}
|
||||
FileCopyUtils.copy(outputStream.toNewInputStream(), res.getOutputStream());
|
||||
}
|
||||
}
|
||||
catch (IOException e)
|
||||
@@ -278,4 +274,20 @@ public class BufferedResponse implements WrappingWebScriptResponse
|
||||
throw new AlfrescoRuntimeException("Failed to commit buffered response", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close()
|
||||
{
|
||||
if (outputStream != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
outputStream.destroy();
|
||||
}
|
||||
catch (Exception ignore)
|
||||
{
|
||||
}
|
||||
outputStream = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
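Since BufferedRequest and BufferedResponse now implement AutoCloseable and take a plain Supplier<TempOutputStream>, callers can rely on try-with-resources instead of manual reset()/destroy() calls in finally blocks. A sketch under assumptions: the package of TempOutputStream is not visible in this excerpt, and the 4 MB buffer size merely mirrors the default memory threshold used elsewhere in this change.

```java
import java.util.function.Supplier;

import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;

import org.alfresco.repo.web.scripts.BufferedRequest;
import org.alfresco.repo.web.scripts.BufferedResponse;
import org.alfresco.repo.web.scripts.TempOutputStream; // package assumed; adjust to the real location

public class BufferedWrappersSketch
{
    // Illustrative only: wraps a web script invocation in the buffered request/response.
    public void runBuffered(WebScriptRequest req, WebScriptResponse res,
            Supplier<TempOutputStream> streamFactory)
    {
        // close() releases the temporary buffers even if the wrapped execution throws
        try (BufferedRequest bufferedReq = new BufferedRequest(req, streamFactory);
             BufferedResponse bufferedRes = new BufferedResponse(res, 4 * 1024 * 1024, streamFactory))
        {
            // ... execute the web script against the buffered wrappers ...
        }
    }
}
```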
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2019 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -25,12 +25,13 @@
|
||||
*/
|
||||
package org.alfresco.repo.web.scripts;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.SocketException;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.transaction.Status;
|
||||
@@ -40,7 +41,6 @@ import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.error.ExceptionStackUtil;
|
||||
import org.alfresco.repo.model.Repository;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
@@ -95,8 +95,7 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
private String tempDirectoryName = null;
|
||||
private int memoryThreshold = 4 * 1024 * 1024; // 4mb
|
||||
private long maxContentSize = (long) 4 * 1024 * 1024 * 1024; // 4gb
|
||||
private TempOutputStreamFactory streamFactory = null;
|
||||
private TempOutputStreamFactory responseStreamFactory = null;
|
||||
private Supplier<TempOutputStream> streamFactory = null;
|
||||
private String preserveHeadersPattern = null;
|
||||
|
||||
private Class<?>[] notPublicExceptions = new Class<?>[] {};
|
||||
@@ -107,17 +106,16 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
*/
|
||||
public void setup()
|
||||
{
|
||||
File tempDirectory = TempFileProvider.getTempDir(tempDirectoryName);
|
||||
this.streamFactory = new TempOutputStreamFactory(tempDirectory, memoryThreshold, maxContentSize, encryptTempFiles, false);
|
||||
this.responseStreamFactory = new TempOutputStreamFactory(tempDirectory, memoryThreshold, maxContentSize,
|
||||
encryptTempFiles, false);
|
||||
streamFactory = TempOutputStream.factory(
|
||||
TempFileProvider.getTempDir(tempDirectoryName),
|
||||
memoryThreshold, maxContentSize, encryptTempFiles);
|
||||
}
|
||||
|
||||
public void setEncryptTempFiles(Boolean encryptTempFiles)
|
||||
{
|
||||
if(encryptTempFiles != null)
|
||||
{
|
||||
this.encryptTempFiles = encryptTempFiles.booleanValue();
|
||||
this.encryptTempFiles = encryptTempFiles;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,7 +128,7 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
{
|
||||
if(memoryThreshold != null)
|
||||
{
|
||||
this.memoryThreshold = memoryThreshold.intValue();
|
||||
this.memoryThreshold = memoryThreshold;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,7 +136,7 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
{
|
||||
if(maxContentSize != null)
|
||||
{
|
||||
this.maxContentSize = maxContentSize.longValue();
|
||||
this.maxContentSize = maxContentSize;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,8 +244,7 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
*/
|
||||
public Map<String, Object> getScriptParameters()
|
||||
{
|
||||
Map<String, Object> params = new HashMap<String, Object>();
|
||||
params.putAll(super.getScriptParameters());
|
||||
Map<String, Object> params = new HashMap<>(super.getScriptParameters());
|
||||
addRepoParameters(params);
|
||||
return params;
|
||||
}
|
||||
@@ -259,16 +256,11 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
public Map<String, Object> getTemplateParameters()
|
||||
{
|
||||
// Ensure we have a transaction - we might be generating the status template after the main transaction failed
|
||||
return fallbackTransactionHelper.doInTransaction(new RetryingTransactionCallback<Map<String, Object>>()
|
||||
{
|
||||
public Map<String, Object> execute() throws Throwable
|
||||
{
|
||||
Map<String, Object> params = new HashMap<String, Object>();
|
||||
params.putAll(RepositoryContainer.super.getTemplateParameters());
|
||||
params.put(TemplateService.KEY_IMAGE_RESOLVER, imageResolver.getImageResolver());
|
||||
addRepoParameters(params);
|
||||
return params;
|
||||
}
|
||||
return fallbackTransactionHelper.doInTransaction(() -> {
|
||||
Map<String, Object> params = new HashMap<>(RepositoryContainer.super.getTemplateParameters());
|
||||
params.put(TemplateService.KEY_IMAGE_RESOLVER, imageResolver.getImageResolver());
|
||||
addRepoParameters(params);
|
||||
return params;
|
||||
}, true);
|
||||
}
|
||||
|
||||
@@ -321,7 +313,7 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
Throwable displayCause = ExceptionStackUtil.getCause(e, publicExceptions);
|
||||
if (displayCause == null && hideCause != null)
|
||||
{
|
||||
AlfrescoRuntimeException alf = null;
|
||||
final AlfrescoRuntimeException alf;
|
||||
if (e instanceof AlfrescoRuntimeException)
|
||||
{
|
||||
alf = (AlfrescoRuntimeException) e;
|
||||
@@ -342,117 +334,154 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
}
|
||||
}
|
||||
|
||||
protected void executeScriptInternal(WebScriptRequest scriptReq, WebScriptResponse scriptRes, final Authenticator auth)
|
||||
protected void executeScriptInternal(final WebScriptRequest scriptReq, final WebScriptResponse scriptRes, final Authenticator auth)
|
||||
throws IOException
|
||||
{
|
||||
final WebScript script = scriptReq.getServiceMatch().getWebScript();
|
||||
final Description desc = script.getDescription();
|
||||
final boolean debug = logger.isDebugEnabled();
|
||||
|
||||
|
||||
// Escalate the webscript declared level of authentication to the container required authentication
|
||||
// eg. must be guest if MT is enabled unless credentials are empty
|
||||
RequiredAuthentication containerRequiredAuthentication = getRequiredAuthentication();
|
||||
final RequiredAuthentication containerRequiredAuthentication = getRequiredAuthentication();
|
||||
final RequiredAuthentication required = (desc.getRequiredAuthentication().compareTo(containerRequiredAuthentication) < 0 && !auth.emptyCredentials() ? containerRequiredAuthentication : desc.getRequiredAuthentication());
|
||||
final boolean isGuest = scriptReq.isGuest();
|
||||
|
||||
|
||||
if (required == RequiredAuthentication.none)
|
||||
{
|
||||
// TODO revisit - cleared here, in-lieu of WebClient clear
|
||||
//AuthenticationUtil.clearCurrentSecurityContext();
|
||||
|
||||
|
||||
transactionedExecuteAs(script, scriptReq, scriptRes);
|
||||
return;
|
||||
}
|
||||
else if ((required == RequiredAuthentication.user || required == RequiredAuthentication.admin) && isGuest)
|
||||
|
||||
// if the required authentication is not equal to guest, then it should be one of the following:
|
||||
// user | sysadmin | admin (the 'none' authentication is handled above)
|
||||
// in this case the guest user should not be able to execute those scripts.
|
||||
if (required != RequiredAuthentication.guest && isGuest)
|
||||
{
|
||||
throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + desc.getId() + " requires user authentication; however, a guest has attempted access.");
|
||||
}
|
||||
else
|
||||
|
||||
try
|
||||
{
|
||||
try
|
||||
AuthenticationUtil.pushAuthentication();
|
||||
|
||||
//
|
||||
// Determine if user already authenticated
|
||||
//
|
||||
if (debug)
|
||||
{
|
||||
AuthenticationUtil.pushAuthentication();
|
||||
|
||||
//
|
||||
// Determine if user already authenticated
|
||||
//
|
||||
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
logger.debug("Current authentication: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
logger.debug("Authentication required: " + required);
|
||||
logger.debug("Guest login requested: " + isGuest);
|
||||
}
|
||||
|
||||
//
|
||||
// Apply appropriate authentication to Web Script invocation
|
||||
//
|
||||
final RetryingTransactionCallback<Boolean> authWork = () -> {
|
||||
if (auth != null && !auth.authenticate(required, isGuest))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
// The user will now have been authenticated, based on HTTP Auth, Ticket, etc.
|
||||
// Check that the user they authenticated as has appropriate access to the script
|
||||
checkScriptAccess(required, desc.getId());
|
||||
|
||||
if (debug)
|
||||
{
|
||||
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
logger.debug("Current authentication: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
logger.debug("Authentication required: " + required);
|
||||
logger.debug("Guest login requested: " + isGuest);
|
||||
logger.debug("Authentication: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
}
|
||||
|
||||
//
|
||||
// Apply appropriate authentication to Web Script invocation
|
||||
//
|
||||
RetryingTransactionCallback<Boolean> authWork = new RetryingTransactionCallback<Boolean>()
|
||||
{
|
||||
public Boolean execute() throws Exception
|
||||
{
|
||||
if (auth == null || auth.authenticate(required, isGuest))
|
||||
{
|
||||
// The user will now have been authenticated, based on HTTP Auth, Ticket etc
|
||||
// Check that the user they authenticated as has appropriate access to the script
|
||||
|
||||
// Check to see if they supplied HTTP Auth or Ticket as guest, on a script that needs more
|
||||
if (required == RequiredAuthentication.user || required == RequiredAuthentication.admin)
|
||||
{
|
||||
String authenticatedUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
String runAsUser = AuthenticationUtil.getRunAsUser();
|
||||
|
||||
if ( (authenticatedUser == null) ||
|
||||
(authenticatedUser.equals(runAsUser) && authorityService.hasGuestAuthority()) ||
|
||||
(!authenticatedUser.equals(runAsUser) && authorityService.isGuestAuthority(authenticatedUser)) )
|
||||
{
|
||||
throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + desc.getId() + " requires user authentication; however, a guest has attempted access.");
|
||||
}
|
||||
}
|
||||
|
||||
// Check to see if they're admin or system on an Admin only script
|
||||
if (required == RequiredAuthentication.admin && !(authorityService.hasAdminAuthority() || AuthenticationUtil.getFullyAuthenticatedUser().equals(AuthenticationUtil.getSystemUserName())))
|
||||
{
|
||||
throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + desc.getId() + " requires admin authentication; however, a non-admin has attempted access.");
|
||||
}
|
||||
|
||||
if (debug)
|
||||
{
|
||||
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
logger.debug("Authentication: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
boolean readOnly = transactionService.isReadOnly();
|
||||
boolean requiresNew = !readOnly && AlfrescoTransactionSupport.getTransactionReadState() == TxnReadState.TXN_READ_ONLY;
|
||||
if (transactionService.getRetryingTransactionHelper().doInTransaction(authWork, readOnly, requiresNew))
|
||||
{
|
||||
// Execute Web Script if authentication passed
|
||||
// The Web Script has its own txn management with potential runAs() user
|
||||
transactionedExecuteAs(script, scriptReq, scriptRes);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Authentication failed for Web Script " + desc.getId());
|
||||
}
|
||||
}
|
||||
finally
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
final boolean readOnly = transactionService.isReadOnly();
|
||||
final boolean requiresNew = !readOnly && AlfrescoTransactionSupport.getTransactionReadState() == TxnReadState.TXN_READ_ONLY;
|
||||
if (!transactionService.getRetryingTransactionHelper().doInTransaction(authWork, readOnly, requiresNew))
|
||||
{
|
||||
//
|
||||
// Reset authentication for current thread
|
||||
//
|
||||
AuthenticationUtil.popAuthentication();
|
||||
|
||||
if (debug)
|
||||
{
|
||||
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
logger.debug("Authentication reset: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
}
|
||||
throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Authentication failed for Web Script " + desc.getId());
|
||||
}
|
||||
|
||||
// Execute Web Script if authentication passed
|
||||
// The Web Script has its own txn management with potential runAs() user
|
||||
transactionedExecuteAs(script, scriptReq, scriptRes, required);
|
||||
}
|
||||
finally
|
||||
{
|
||||
//
|
||||
// Reset authentication for current thread
|
||||
//
|
||||
AuthenticationUtil.popAuthentication();
|
||||
|
||||
if (debug)
|
||||
{
|
||||
String currentUser = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
logger.debug("Authentication reset: " + (currentUser == null ? "unauthenticated" : "authenticated as " + currentUser));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    private boolean isSystemUser()
    {
        return Objects.equals(AuthenticationUtil.getFullyAuthenticatedUser(), AuthenticationUtil.getSystemUserName());
    }

    private boolean isSysAdminUser()
    {
        return authorityService.hasSysAdminAuthority();
    }

    private boolean isAdmin()
    {
        return authorityService.hasAdminAuthority();
    }

    public final boolean isAdminOrSystemUser()
    {
        return isAdmin() || isSystemUser();
    }

    /**
     * Check to see if they supplied HTTP Auth or Ticket as guest, on a script that needs more
     */
    private void checkGuestAccess(RequiredAuthentication required, String scriptDescriptorId)
    {
        if (required == RequiredAuthentication.user || required == RequiredAuthentication.admin
                || required == RequiredAuthentication.sysadmin)
        {
            final String authenticatedUser = AuthenticationUtil.getFullyAuthenticatedUser();
            final String runAsUser = AuthenticationUtil.getRunAsUser();

            if ((authenticatedUser == null) || (authenticatedUser.equals(runAsUser)
                    && authorityService.hasGuestAuthority()) || (!authenticatedUser.equals(runAsUser)
                    && authorityService.isGuestAuthority(authenticatedUser)))
            {
                throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + scriptDescriptorId
                        + " requires user authentication; however, a guest has attempted access.");
            }
        }
    }

    private void checkScriptAccess(RequiredAuthentication required, String scriptDescriptorId)
    {
        // first, check guest access
        checkGuestAccess(required, scriptDescriptorId);

        // Check to see if the user is sysAdmin, admin or system on a sysadmin script
        if (required == RequiredAuthentication.sysadmin && !(isSysAdminUser() || isAdminOrSystemUser()))
        {
            throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + scriptDescriptorId
                    + " requires system-admin authentication; however, a non-system-admin has attempted access.");
        }
        else if (required == RequiredAuthentication.admin && !isAdminOrSystemUser())
        {
            throw new WebScriptException(HttpServletResponse.SC_UNAUTHORIZED, "Web Script " + scriptDescriptorId
                    + " requires admin authentication; however, a non-admin has attempted access.");
        }
    }

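Taken together, checkGuestAccess and checkScriptAccess enforce a small authorisation ladder: guest credentials are rejected for user, admin and sysadmin scripts; sysadmin scripts accept members of the sysadmin group as well as admin or the System user; admin scripts accept only admin or System. A reduced, self-contained sketch of that decision table (plain Java with illustrative names, not the Alfresco API):

// Illustrative only: a reduced model of the ladder enforced by checkScriptAccess.
enum Required { none, guest, user, admin, sysadmin }

final class AccessLadder
{
    static boolean allowed(Required required, boolean isGuest, boolean isAdmin, boolean isSysAdmin, boolean isSystem)
    {
        switch (required)
        {
        case none:
        case guest:
            return true;                              // no additional checks
        case user:
            return !isGuest;                          // any authenticated, non-guest user
        case admin:
            return isAdmin || isSystem;               // admin or the System user only
        case sysadmin:
            return isSysAdmin || isAdmin || isSystem; // sysadmin group, admin or System
        default:
            return false;
        }
    }

    public static void main(String[] args)
    {
        System.out.println(allowed(Required.user, true, false, false, false));     // false: guest rejected
        System.out.println(allowed(Required.sysadmin, false, false, true, false)); // true: sysadmin group member
        System.out.println(allowed(Required.admin, false, false, true, false));    // false: sysadmin is not admin
    }
}
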
@@ -467,191 +496,160 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
protected void transactionedExecute(final WebScript script, final WebScriptRequest scriptReq, final WebScriptResponse scriptRes)
|
||||
throws IOException
|
||||
{
|
||||
final Description description = script.getDescription();
|
||||
|
||||
try
|
||||
{
|
||||
final Description description = script.getDescription();
|
||||
if (description.getRequiredTransaction() == RequiredTransaction.none)
|
||||
{
|
||||
script.execute(scriptReq, scriptRes);
|
||||
return;
|
||||
}
|
||||
else
|
||||
}
|
||||
catch (IOException e)
|
||||
{
|
||||
handleIOException(e);
|
||||
}
|
||||
|
||||
final RequiredTransactionParameters trxParams = description.getRequiredTransactionParameters();
|
||||
|
||||
try (final BufferedRequest bufferedReq = newBufferedRequest(trxParams, scriptReq, streamFactory);
|
||||
final BufferedResponse bufferedRes = newBufferedResponse(trxParams, scriptRes, streamFactory))
|
||||
{
|
||||
boolean readonly = description.getRequiredTransactionParameters().getCapability() == TransactionCapability.readonly;
|
||||
boolean requiresNew = description.getRequiredTransaction() == RequiredTransaction.requiresnew;
|
||||
|
||||
// log a warning if we detect a GET webscript being run in a readwrite transaction, GET calls should
|
||||
// NOT have any side effects so this scenario as a warning sign something maybe amiss, see ALF-10179.
|
||||
if (logger.isDebugEnabled() && !readonly && "GET".equalsIgnoreCase(
|
||||
description.getMethod()))
|
||||
{
|
||||
final BufferedRequest bufferedReq;
|
||||
final BufferedResponse bufferedRes;
|
||||
RequiredTransactionParameters trxParams = description.getRequiredTransactionParameters();
|
||||
if (trxParams.getCapability() == TransactionCapability.readwrite)
|
||||
{
|
||||
if (trxParams.getBufferSize() > 0)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Creating Transactional Response for ReadWrite transaction; buffersize=" + trxParams.getBufferSize());
|
||||
logger.debug("Webscript with URL '" + scriptReq.getURL() +
|
||||
"' is a GET request but it's descriptor has declared a readwrite transaction is required");
|
||||
}
|
||||
|
||||
// create buffered request and response that allow transaction retrying
|
||||
bufferedReq = new BufferedRequest(scriptReq, streamFactory);
|
||||
bufferedRes = new BufferedResponse(scriptRes, trxParams.getBufferSize(), responseStreamFactory);
|
||||
}
|
||||
else
|
||||
try
|
||||
{
|
||||
final RetryingTransactionHelper transactionHelper = transactionService.getRetryingTransactionHelper();
|
||||
if (script instanceof LoginPost)
|
||||
{
|
||||
//login script requires read-write transaction because of authorization interceptor
|
||||
transactionHelper.setForceWritable(true);
|
||||
}
|
||||
transactionHelper.doInTransaction(() -> {
|
||||
try
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Transactional Response bypassed for ReadWrite - buffersize=0");
|
||||
bufferedReq = null;
|
||||
bufferedRes = null;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
bufferedReq = null;
|
||||
bufferedRes = null;
|
||||
}
|
||||
|
||||
// encapsulate script within transaction
|
||||
RetryingTransactionCallback<Object> work = new RetryingTransactionCallback<Object>()
|
||||
{
|
||||
public Object execute() throws Exception
|
||||
{
|
||||
try
|
||||
logger.debug("Begin retry transaction block: " + description.getRequiredTransaction() + ","
|
||||
+ description.getRequiredTransactionParameters().getCapability());
|
||||
|
||||
if (bufferedReq == null || bufferedRes == null)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Begin retry transaction block: " + description.getRequiredTransaction() + ","
|
||||
+ description.getRequiredTransactionParameters().getCapability());
|
||||
|
||||
if (bufferedRes == null)
|
||||
{
|
||||
script.execute(scriptReq, scriptRes);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Reset the request and response in case of a transaction retry
|
||||
bufferedReq.reset();
|
||||
// REPO-4388 don't reset specified headers
|
||||
bufferedRes.reset(preserveHeadersPattern);
|
||||
script.execute(bufferedReq, bufferedRes);
|
||||
}
|
||||
script.execute(scriptReq, scriptRes);
|
||||
}
|
||||
catch(Exception e)
|
||||
else
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Transaction exception: " + description.getRequiredTransaction() + ": " + e.getMessage());
|
||||
// Note: user transaction shouldn't be null, but just in case inside this exception handler
|
||||
UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
|
||||
if (userTrx != null)
|
||||
{
|
||||
logger.debug("Transaction status: " + userTrx.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
// Reset the request and response in case of a transaction retry
|
||||
bufferedReq.reset();
|
||||
// REPO-4388 don't reset specified headers
|
||||
bufferedRes.reset(preserveHeadersPattern);
|
||||
script.execute(bufferedReq, bufferedRes);
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Transaction exception: " + description.getRequiredTransaction() + ": " + e.getMessage());
|
||||
// Note: user transaction shouldn't be null, but just in case inside this exception handler
|
||||
UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
|
||||
if (userTrx != null)
|
||||
{
|
||||
if (userTrx.getStatus() != Status.STATUS_MARKED_ROLLBACK)
|
||||
logger.debug("Transaction status: " + userTrx.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
final UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
|
||||
if (userTrx != null)
|
||||
{
|
||||
if (userTrx.getStatus() != Status.STATUS_MARKED_ROLLBACK)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Marking web script transaction for rollback");
|
||||
try
|
||||
{
|
||||
userTrx.setRollbackOnly();
|
||||
}
|
||||
catch (Throwable re)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Marking web script transaction for rollback");
|
||||
try
|
||||
{
|
||||
userTrx.setRollbackOnly();
|
||||
}
|
||||
catch(Throwable re)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("Caught and ignoring exception during marking for rollback: " + re.getMessage());
|
||||
}
|
||||
logger.debug("Caught and ignoring exception during marking for rollback: " + re.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// re-throw original exception for retry
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("End retry transaction block: " + description.getRequiredTransaction() + ","
|
||||
+ description.getRequiredTransactionParameters().getCapability());
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
boolean readonly = description.getRequiredTransactionParameters().getCapability() == TransactionCapability.readonly;
|
||||
boolean requiresNew = description.getRequiredTransaction() == RequiredTransaction.requiresnew;
|
||||
|
||||
// log a warning if we detect a GET webscript being run in a readwrite transaction, GET calls should
|
||||
// NOT have any side effects so this scenario as a warning sign something maybe amiss, see ALF-10179.
|
||||
if (logger.isDebugEnabled() && !readonly && "GET".equalsIgnoreCase(description.getMethod()))
|
||||
{
|
||||
logger.debug("Webscript with URL '" + scriptReq.getURL() +
|
||||
"' is a GET request but it's descriptor has declared a readwrite transaction is required");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
RetryingTransactionHelper transactionHelper = transactionService.getRetryingTransactionHelper();
|
||||
if(script instanceof LoginPost)
|
||||
{
|
||||
// login script requires read-write transaction because of authorization interceptor
|
||||
transactionHelper.setForceWritable(true);
|
||||
}
|
||||
transactionHelper.doInTransaction(work, readonly, requiresNew);
|
||||
}
|
||||
catch (TooBusyException e)
|
||||
{
|
||||
// Map TooBusyException to a 503 status code
|
||||
throw new WebScriptException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage(), e);
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Get rid of any temporary files
|
||||
if (bufferedReq != null)
|
||||
{
|
||||
bufferedReq.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure a response is always flushed after successful execution
|
||||
if (bufferedRes != null)
|
||||
{
|
||||
bufferedRes.writeResponse();
|
||||
}
|
||||
|
||||
// re-throw original exception for retry
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
logger.debug("End retry transaction block: " + description.getRequiredTransaction() + ","
|
||||
+ description.getRequiredTransactionParameters().getCapability());
|
||||
}
|
||||
|
||||
return null;
|
||||
}, readonly, requiresNew);
|
||||
}
|
||||
}
|
||||
catch (IOException ioe)
|
||||
{
|
||||
Throwable socketException = ExceptionStackUtil.getCause(ioe, SocketException.class);
|
||||
Class<?> clientAbortException = null;
|
||||
try
|
||||
catch (TooBusyException e)
|
||||
{
|
||||
clientAbortException = Class.forName("org.apache.catalina.connector.ClientAbortException");
|
||||
// Map TooBusyException to a 503 status code
|
||||
throw new WebScriptException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage(), e);
|
||||
}
|
||||
catch (ClassNotFoundException e)
|
||||
|
||||
// Ensure a response is always flushed after successful execution
|
||||
if (bufferedRes != null)
|
||||
{
|
||||
// do nothing
|
||||
}
|
||||
// Note: if you need to look for more exceptions in the stack, then create a static array and pass it in
|
||||
if ((socketException != null && socketException.getMessage().contains("Broken pipe")) || (clientAbortException != null && ExceptionStackUtil.getCause(ioe, clientAbortException) != null))
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.warn("Client has cut off communication", ioe);
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.info("Client has cut off communication");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw ioe;
|
||||
bufferedRes.writeResponse();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
    private static void handleIOException(final IOException ioe) throws IOException
    {
        Throwable socketException = ExceptionStackUtil.getCause(ioe, SocketException.class);
        Class<?> clientAbortException = null;
        try
        {
            clientAbortException = Class.forName("org.apache.catalina.connector.ClientAbortException");
        }
        catch (ClassNotFoundException e)
        {
            // do nothing
        }
        // Note: if you need to look for more exceptions in the stack, then create a static array and pass it in
        if ((socketException != null && socketException.getMessage().contains("Broken pipe")) ||
                (clientAbortException != null && ExceptionStackUtil.getCause(ioe, clientAbortException) != null))
        {
            if (logger.isDebugEnabled())
            {
                logger.warn("Client has cut off communication", ioe);
            }
            else
            {
                logger.info("Client has cut off communication");
            }
        }
        else
        {
            throw ioe;
        }
    }

/**
|
||||
* Execute script within required level of transaction as required effective user.
|
||||
*
|
||||
* @param script WebScript
|
||||
*
|
||||
* @param script WebScript
|
||||
* @param scriptReq WebScriptRequest
|
||||
* @param scriptRes WebScriptResponse
|
||||
* @throws IOException
|
||||
@@ -659,22 +657,46 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
private void transactionedExecuteAs(final WebScript script, final WebScriptRequest scriptReq,
|
||||
final WebScriptResponse scriptRes) throws IOException
|
||||
{
|
||||
String runAs = script.getDescription().getRunAs();
|
||||
final String runAs = script.getDescription().getRunAs();
|
||||
if (runAs == null)
|
||||
{
|
||||
transactionedExecute(script, scriptReq, scriptRes);
|
||||
}
|
||||
else
|
||||
{
|
||||
RunAsWork<Object> work = new RunAsWork<Object>()
|
||||
{
|
||||
public Object doWork() throws Exception
|
||||
{
|
||||
transactionedExecute(script, scriptReq, scriptRes);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
AuthenticationUtil.runAs(work, runAs);
|
||||
AuthenticationUtil.runAs(() -> {
|
||||
transactionedExecute(script, scriptReq, scriptRes);
|
||||
return null;
|
||||
}, runAs);
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Execute script within required level of transaction as required effective user.
     *
     * @param script WebScript
     * @param scriptReq WebScriptRequest
     * @param scriptRes WebScriptResponse
     * @param requiredAuthentication Required authentication
     * @throws IOException
     */
    private void transactionedExecuteAs(final WebScript script, final WebScriptRequest scriptReq,
            final WebScriptResponse scriptRes, RequiredAuthentication requiredAuthentication) throws IOException
    {
        // Execute as System if, and only if, the current user is a member of the System-Admin group and is not a super admin.
        // E.g. if 'jdoe' is a member of the ALFRESCO_SYSTEM_ADMINISTRATORS group, then the work should be executed as System to satisfy the ACL checks.
        // But if the current user is Admin (i.e. the super admin, who by default is a member of the ALFRESCO_SYSTEM_ADMINISTRATORS group),
        // then don't wrap the work in RunAs, since the admin can do anything.
        if (requiredAuthentication == RequiredAuthentication.sysadmin && isSysAdminUser() && !isAdmin())
        {
            AuthenticationUtil.runAs(() -> {
                transactionedExecute(script, scriptReq, scriptRes);
                return null;
            }, AuthenticationUtil.SYSTEM_USER_NAME);
        }
        else
        {
            transactionedExecuteAs(script, scriptReq, scriptRes);
        }
    }

@@ -688,17 +710,12 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
{
|
||||
ContextRefreshedEvent refreshEvent = (ContextRefreshedEvent)event;
|
||||
ApplicationContext refreshContext = refreshEvent.getApplicationContext();
|
||||
if (refreshContext != null && refreshContext.equals(applicationContext))
|
||||
if (refreshContext.equals(applicationContext))
|
||||
{
|
||||
RunAsWork<Object> work = new RunAsWork<Object>()
|
||||
{
|
||||
public Object doWork() throws Exception
|
||||
{
|
||||
reset();
|
||||
return null;
|
||||
}
|
||||
};
|
||||
AuthenticationUtil.runAs(work, AuthenticationUtil.getSystemUserName());
|
||||
AuthenticationUtil.runAs(() -> {
|
||||
reset();
|
||||
return null;
|
||||
}, AuthenticationUtil.getSystemUserName());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -739,18 +756,54 @@ public class RepositoryContainer extends AbstractRuntimeContainer
|
||||
@Override
|
||||
public void reset()
|
||||
{
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Object>()
|
||||
{
|
||||
public Object execute() throws Exception
|
||||
{
|
||||
internalReset();
|
||||
return null;
|
||||
}
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
|
||||
internalReset();
|
||||
return null;
|
||||
}, true, false);
|
||||
}
|
||||
|
||||
|
||||
private void internalReset()
|
||||
{
|
||||
super.reset();
|
||||
}
|
||||
|
||||
    private static BufferedRequest newBufferedRequest(
            final RequiredTransactionParameters trxParams,
            final WebScriptRequest scriptReq,
            final Supplier<TempOutputStream> streamFactory)
    {
        if (trxParams.getCapability() != TransactionCapability.readwrite)
        {
            return null;
        }
        if (trxParams.getBufferSize() <= 0)
        {
            return null;
        }

        // create buffered request that allow transaction retrying
        return new BufferedRequest(scriptReq, streamFactory);
    }

    private static BufferedResponse newBufferedResponse(
            final RequiredTransactionParameters trxParams,
            final WebScriptResponse scriptRes,
            final Supplier<TempOutputStream> streamFactory)
    {
        if (trxParams.getCapability() != TransactionCapability.readwrite)
        {
            return null;
        }
        if (trxParams.getBufferSize() <= 0)
        {
            if (logger.isDebugEnabled())
                logger.debug("Transactional Response bypassed for ReadWrite - buffersize=0");
            return null;
        }
        if (logger.isDebugEnabled())
            logger.debug("Creating Transactional Response for ReadWrite transaction; buffersize=" + trxParams.getBufferSize());

        // create buffered response that allow transaction retrying
        return new BufferedResponse(scriptRes, trxParams.getBufferSize(), streamFactory);
    }
}

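newBufferedRequest and newBufferedResponse exist because a read-write web script may be retried by the retrying transaction helper: the request body and the response have to be replayable, so each attempt resets both buffers (the response keeps any headers matched by preserveHeadersPattern) before the script runs again. A minimal sketch of that retry shape, deliberately independent of the Alfresco classes (Resettable, maxRetries and the transient-failure policy are illustrative):

// Illustrative retry loop: replayable buffers let a failed attempt be re-executed
// against the same request body and a clean response.
import java.util.concurrent.Callable;

final class BufferedRetrySketch
{
    interface Resettable { void reset(); }

    static <T> T execute(int maxRetries, Resettable bufferedReq, Resettable bufferedRes, Callable<T> attempt)
            throws Exception
    {
        for (int attemptNo = 1; ; attemptNo++)
        {
            try
            {
                bufferedReq.reset();   // rewind the captured request body
                bufferedRes.reset();   // drop any partially written output
                return attempt.call(); // corresponds to script.execute(bufferedReq, bufferedRes)
            }
            catch (Exception e)
            {
                if (attemptNo >= maxRetries)
                {
                    throw e;           // out of attempts, propagate the last failure
                }
                // the real helper only retries transient failures (deadlocks, lock timeouts, ...)
            }
        }
    }
}
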
@@ -36,6 +36,7 @@ import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.security.Key;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import javax.crypto.Cipher;
|
||||
import javax.crypto.CipherInputStream;
|
||||
@@ -88,13 +89,11 @@ public class TempOutputStream extends OutputStream
|
||||
private final File tempDir;
|
||||
private final int memoryThreshold;
|
||||
private final long maxContentSize;
|
||||
private boolean encrypt;
|
||||
private boolean deleteTempFileOnClose;
|
||||
private final boolean encrypt;
|
||||
|
||||
private long length = 0;
|
||||
private OutputStream outputStream;
|
||||
private File tempFile;
|
||||
private TempByteArrayOutputStream tempStream;
|
||||
|
||||
private Key symKey;
|
||||
private byte[] iv;
|
||||
@@ -112,58 +111,49 @@ public class TempOutputStream extends OutputStream
|
||||
* the max content size in B
|
||||
* @param encrypt
|
||||
* true if temp files should be encrypted
|
||||
* @param deleteTempFileOnClose
|
||||
* true if temp files should be deleted on output stream close
|
||||
* (useful if we need to cache the content for further reads). If
|
||||
* this is false then we need to make sure we call
|
||||
* {@link TempOutputStream}.destroy to clean up properly.
|
||||
*/
|
||||
public TempOutputStream(File tempDir, int memoryThreshold, long maxContentSize, boolean encrypt, boolean deleteTempFileOnClose)
|
||||
public TempOutputStream(File tempDir, int memoryThreshold, long maxContentSize, boolean encrypt)
|
||||
{
|
||||
this.tempDir = tempDir;
|
||||
this.memoryThreshold = (memoryThreshold < 0) ? DEFAULT_MEMORY_THRESHOLD : memoryThreshold;
|
||||
this.maxContentSize = maxContentSize;
|
||||
this.encrypt = encrypt;
|
||||
this.deleteTempFileOnClose = deleteTempFileOnClose;
|
||||
|
||||
this.tempStream = new TempByteArrayOutputStream();
|
||||
this.outputStream = this.tempStream;
|
||||
this.outputStream = new ByteArrayOutputStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the data as an InputStream
|
||||
*/
|
||||
public InputStream getInputStream() throws IOException
|
||||
public InputStream toNewInputStream() throws IOException
|
||||
{
|
||||
if (tempFile != null)
|
||||
closeOutputStream();
|
||||
|
||||
if (tempFile == null)
|
||||
{
|
||||
return new ByteArrayInputStream(((ByteArrayOutputStream) outputStream).toByteArray());
|
||||
}
|
||||
if (!encrypt)
|
||||
{
|
||||
if (encrypt)
|
||||
{
|
||||
final Cipher cipher;
|
||||
try
|
||||
{
|
||||
cipher = Cipher.getInstance(TRANSFORMATION);
|
||||
cipher.init(Cipher.DECRYPT_MODE, symKey, new IvParameterSpec(iv));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
destroy();
|
||||
|
||||
if (logger.isErrorEnabled())
|
||||
{
|
||||
logger.error("Cannot initialize decryption cipher", e);
|
||||
}
|
||||
|
||||
throw new IOException("Cannot initialize decryption cipher", e);
|
||||
}
|
||||
|
||||
return new BufferedInputStream(new CipherInputStream(new FileInputStream(tempFile), cipher));
|
||||
}
|
||||
return new BufferedInputStream(new FileInputStream(tempFile));
|
||||
}
|
||||
else
|
||||
try
|
||||
{
|
||||
return new ByteArrayInputStream(tempStream.getBuffer(), 0, tempStream.getCount());
|
||||
final Cipher cipher = Cipher.getInstance(TRANSFORMATION);
|
||||
cipher.init(Cipher.DECRYPT_MODE, symKey, new IvParameterSpec(iv));
|
||||
|
||||
return new BufferedInputStream(new CipherInputStream(new FileInputStream(tempFile), cipher));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
destroy();
|
||||
|
||||
if (logger.isErrorEnabled())
|
||||
{
|
||||
logger.error("Cannot initialize decryption cipher", e);
|
||||
}
|
||||
|
||||
throw new IOException("Cannot initialize decryption cipher", e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,7 +180,7 @@ public class TempOutputStream extends OutputStream
|
||||
@Override
|
||||
public void close() throws IOException
|
||||
{
|
||||
close(deleteTempFileOnClose);
|
||||
closeOutputStream();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -215,7 +205,9 @@ public class TempOutputStream extends OutputStream
|
||||
*/
|
||||
public void destroy() throws IOException
|
||||
{
|
||||
close(true);
|
||||
closeOutputStream();
|
||||
|
||||
deleteTempFile();
|
||||
}
|
||||
|
||||
public long getLength()
|
||||
@@ -282,102 +274,95 @@ public class TempOutputStream extends OutputStream
|
||||
}
|
||||
}
|
||||
|
||||
private void close(boolean deleteTempFileOnClose)
|
||||
private BufferedOutputStream createFileOutputStream(final File file) throws IOException
|
||||
{
|
||||
closeOutputStream();
|
||||
|
||||
if (deleteTempFileOnClose)
|
||||
if (!encrypt)
|
||||
{
|
||||
deleteTempFile();
|
||||
return new BufferedOutputStream(new FileOutputStream(file));
|
||||
}
|
||||
}
|
||||
|
||||
private BufferedOutputStream createOutputStream(File file) throws IOException
|
||||
{
|
||||
BufferedOutputStream fileOutputStream;
|
||||
if (encrypt)
|
||||
try
|
||||
{
|
||||
try
|
||||
// Generate a symmetric key
|
||||
final KeyGenerator keyGen = KeyGenerator.getInstance(ALGORITHM);
|
||||
keyGen.init(KEY_SIZE);
|
||||
symKey = keyGen.generateKey();
|
||||
|
||||
Cipher cipher = Cipher.getInstance(TRANSFORMATION);
|
||||
cipher.init(Cipher.ENCRYPT_MODE, symKey);
|
||||
|
||||
iv = cipher.getIV();
|
||||
|
||||
return new BufferedOutputStream(new CipherOutputStream(new FileOutputStream(file), cipher));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
if (logger.isErrorEnabled())
|
||||
{
|
||||
// Generate a symmetric key
|
||||
final KeyGenerator keyGen = KeyGenerator.getInstance(ALGORITHM);
|
||||
keyGen.init(KEY_SIZE);
|
||||
symKey = keyGen.generateKey();
|
||||
|
||||
Cipher cipher = Cipher.getInstance(TRANSFORMATION);
|
||||
cipher.init(Cipher.ENCRYPT_MODE, symKey);
|
||||
|
||||
iv = cipher.getIV();
|
||||
|
||||
fileOutputStream = new BufferedOutputStream(new CipherOutputStream(new FileOutputStream(file), cipher));
|
||||
logger.error("Cannot initialize encryption cipher", e);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
if (logger.isErrorEnabled())
|
||||
{
|
||||
logger.error("Cannot initialize encryption cipher", e);
|
||||
}
|
||||
|
||||
throw new IOException("Cannot initialize encryption cipher", e);
|
||||
}
|
||||
throw new IOException("Cannot initialize encryption cipher", e);
|
||||
}
|
||||
else
|
||||
{
|
||||
fileOutputStream = new BufferedOutputStream(new FileOutputStream(file));
|
||||
}
|
||||
|
||||
return fileOutputStream;
|
||||
}
|
||||
|
||||
private void update(int len) throws IOException
|
||||
{
|
||||
if (maxContentSize > -1 && length + len > maxContentSize)
|
||||
if (surpassesMaxContentSize(len))
|
||||
{
|
||||
destroy();
|
||||
throw new ContentLimitViolationException("Content size violation, limit = " + maxContentSize);
|
||||
}
|
||||
|
||||
if (tempFile == null && (tempStream.getCount() + len) > memoryThreshold)
|
||||
if (surpassesThreshold(len))
|
||||
{
|
||||
File file = TempFileProvider.createTempFile(TEMP_FILE_PREFIX, ".bin", tempDir);
|
||||
tempFile = TempFileProvider.createTempFile(TEMP_FILE_PREFIX, ".bin", tempDir);
|
||||
|
||||
BufferedOutputStream fileOutputStream = createOutputStream(file);
|
||||
fileOutputStream.write(this.tempStream.getBuffer(), 0, this.tempStream.getCount());
|
||||
final BufferedOutputStream fileOutputStream = createFileOutputStream(tempFile);
|
||||
fileOutputStream.write(((ByteArrayOutputStream) outputStream).toByteArray());
|
||||
fileOutputStream.flush();
|
||||
|
||||
try
|
||||
{
|
||||
tempStream.close();
|
||||
outputStream.close();
|
||||
}
|
||||
catch (IOException e)
|
||||
catch (IOException ignore)
|
||||
{
|
||||
// Ignore exception
|
||||
}
|
||||
tempStream = null;
|
||||
|
||||
tempFile = file;
|
||||
outputStream = fileOutputStream;
|
||||
}
|
||||
|
||||
length += len;
|
||||
}
|
||||
|
||||
private static class TempByteArrayOutputStream extends ByteArrayOutputStream
|
||||
private boolean surpassesMaxContentSize(final int len)
|
||||
{
|
||||
/**
|
||||
* @return The internal buffer where data is stored
|
||||
*/
|
||||
public byte[] getBuffer()
|
||||
{
|
||||
return buf;
|
||||
}
|
||||
return maxContentSize >= 0 && length + len > maxContentSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The number of valid bytes in the buffer.
|
||||
*/
|
||||
public int getCount()
|
||||
{
|
||||
return count;
|
||||
}
|
||||
private boolean surpassesThreshold(final int len)
|
||||
{
|
||||
return tempFile == null && length + len > memoryThreshold;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link TempOutputStream} factory/supplier.
|
||||
*
|
||||
* @param tempDir
|
||||
* the temporary directory, i.e. <code>isDir == true</code>, that
|
||||
* will be used as parent directory for creating temp file backed
|
||||
* streams
|
||||
* @param memoryThreshold
|
||||
* the memory threshold in B
|
||||
* @param maxContentSize
|
||||
* the max content size in B
|
||||
* @param encrypt
|
||||
* true if temp files should be encrypted
|
||||
*/
|
||||
public static Supplier<TempOutputStream> factory(final File tempDir, final int memoryThreshold,
|
||||
final long maxContentSize, final boolean encrypt)
|
||||
{
|
||||
return () -> new TempOutputStream(tempDir, memoryThreshold, maxContentSize, encrypt);
|
||||
}
|
||||
}
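To summarise the behaviour of the class as rewritten here: writes accumulate in an in-memory buffer until memoryThreshold is exceeded, then spill to a temp file (encrypted when requested); toNewInputStream() replays whatever was buffered, and destroy() deletes the backing file. A usage sketch under those assumptions (it presumes the same package as TempOutputStream; the directory and sizes are example values):

// Usage sketch: small writes stay in memory, anything beyond the threshold spills
// to an (optionally encrypted) temp file; destroy() cleans the file up.
import java.io.File;
import java.io.InputStream;
import java.util.function.Supplier;

public class TempOutputStreamExample
{
    public static void main(String[] args) throws Exception
    {
        File tempDir = new File(System.getProperty("java.io.tmpdir"));
        Supplier<TempOutputStream> factory = TempOutputStream.factory(
                tempDir, 4 * 1024 /* memoryThreshold */, 1024 * 1024 /* maxContentSize */, true /* encrypt */);

        TempOutputStream out = factory.get();
        try
        {
            out.write(new byte[16 * 1024]);       // exceeds the 4 KiB threshold, spills to a temp file
            try (InputStream in = out.toNewInputStream())
            {
                System.out.println("buffered " + out.getLength() + " bytes");
            }
        }
        finally
        {
            out.destroy();                        // removes the backing temp file
        }
    }
}
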
|
||||
|
@@ -1,105 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2019 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.web.scripts;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
/**
|
||||
* Factory for {@link TempOutputStream}
|
||||
*/
|
||||
public class TempOutputStreamFactory
|
||||
{
|
||||
/**
|
||||
* A temporary directory, i.e. <code>isDir == true</code>, that will be used as
|
||||
* parent directory for creating temp file backed streams.
|
||||
*/
|
||||
private final File tempDir;
|
||||
private int memoryThreshold;
|
||||
private long maxContentSize;
|
||||
private boolean encrypt;
|
||||
private boolean deleteTempFileOnClose;
|
||||
|
||||
/**
|
||||
* Creates a {@link TempOutputStream} factory.
|
||||
*
|
||||
* @param tempDir
|
||||
* the temporary directory, i.e. <code>isDir == true</code>, that
|
||||
* will be used as * parent directory for creating temp file backed
|
||||
* streams
|
||||
* @param memoryThreshold
|
||||
* the memory threshold in B
|
||||
* @param maxContentSize
|
||||
* the max content size in B
|
||||
* @param encrypt
|
||||
* true if temp files should be encrypted
|
||||
* @param deleteTempFileOnClose
|
||||
* true if temp files should be deleted on output stream close
|
||||
* (useful if we need to cache the content for further reads). If
|
||||
* this is false then we need to make sure we call
|
||||
* {@link TempOutputStream}.destroy to clean up properly.
|
||||
*/
|
||||
public TempOutputStreamFactory(File tempDir, int memoryThreshold, long maxContentSize, boolean encrypt, boolean deleteTempFileOnClose)
|
||||
{
|
||||
this.tempDir = tempDir;
|
||||
this.memoryThreshold = memoryThreshold;
|
||||
this.maxContentSize = maxContentSize;
|
||||
this.encrypt = encrypt;
|
||||
this.deleteTempFileOnClose = deleteTempFileOnClose;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TempOutputStream} object
|
||||
*/
|
||||
public TempOutputStream createOutputStream()
|
||||
{
|
||||
return new TempOutputStream(tempDir, memoryThreshold, maxContentSize, encrypt, deleteTempFileOnClose);
|
||||
}
|
||||
|
||||
public File getTempDir()
|
||||
{
|
||||
return tempDir;
|
||||
}
|
||||
|
||||
public int getMemoryThreshold()
|
||||
{
|
||||
return memoryThreshold;
|
||||
}
|
||||
|
||||
public long getMaxContentSize()
|
||||
{
|
||||
return maxContentSize;
|
||||
}
|
||||
|
||||
public boolean isEncrypt()
|
||||
{
|
||||
return encrypt;
|
||||
}
|
||||
|
||||
public boolean isDeleteTempFileOnClose()
|
||||
{
|
||||
return deleteTempFileOnClose;
|
||||
}
|
||||
}
|
@@ -30,8 +30,19 @@ import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import com.google.common.primitives.Ints;
|
||||
|
||||
import org.alfresco.repo.bulkimport.BulkFilesystemImporter;
|
||||
import org.alfresco.repo.bulkimport.BulkImportParameters;
|
||||
import org.alfresco.repo.bulkimport.NodeImporter;
|
||||
import org.alfresco.repo.bulkimport.impl.MultiThreadedBulkFilesystemImporter;
|
||||
import org.alfresco.repo.model.Repository;
|
||||
import org.alfresco.service.cmr.model.FileFolderService;
|
||||
import org.alfresco.service.cmr.model.FileInfo;
|
||||
@@ -39,8 +50,12 @@ import org.alfresco.service.cmr.model.FileNotFoundException;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.extensions.surf.util.I18NUtil;
|
||||
import org.springframework.extensions.webscripts.Cache;
|
||||
import org.springframework.extensions.webscripts.DeclarativeWebScript;
|
||||
import org.springframework.extensions.webscripts.Status;
|
||||
import org.springframework.extensions.webscripts.WebScriptException;
|
||||
import org.springframework.extensions.webscripts.WebScriptRequest;
|
||||
|
||||
/**
|
||||
* contains common fields and methods for the import web scripts.
|
||||
@@ -60,10 +75,10 @@ public class AbstractBulkFileSystemImportWebScript extends DeclarativeWebScript
|
||||
// Web scripts parameters (common)
|
||||
protected static final String PARAMETER_REPLACE_EXISTING = "replaceExisting";
|
||||
protected static final String PARAMETER_EXISTING_FILE_MODE = "existingFileMode";
|
||||
protected static final String PARAMETER_VALUE_REPLACE_EXISTING = "replaceExisting";
|
||||
protected static final String PARAMETER_VALUE_REPLACE_EXISTING = "true";
|
||||
protected static final String PARAMETER_SOURCE_DIRECTORY = "sourceDirectory";
|
||||
protected static final String PARAMETER_DISABLE_RULES = "disableRules";
|
||||
protected static final String PARAMETER_VALUE_DISABLE_RULES = "disableRules";
|
||||
protected static final String PARAMETER_VALUE_DISABLE_RULES = "true";
|
||||
|
||||
protected static final String IMPORT_ALREADY_IN_PROGRESS_MODEL_KEY = "importInProgress";
|
||||
protected static final String IMPORT_ALREADY_IN_PROGRESS_ERROR_KEY ="bfsit.error.importAlreadyInProgress";
|
||||
@@ -75,7 +90,7 @@ public class AbstractBulkFileSystemImportWebScript extends DeclarativeWebScript
|
||||
protected Repository repository;
|
||||
|
||||
protected volatile boolean importInProgress;
|
||||
|
||||
|
||||
protected NodeRef getTargetNodeRef(String targetNodeRefStr, String targetPath) throws FileNotFoundException
|
||||
{
|
||||
NodeRef targetNodeRef;
|
||||
@@ -219,4 +234,198 @@ public class AbstractBulkFileSystemImportWebScript extends DeclarativeWebScript
|
||||
this.repository = repository;
|
||||
}
|
||||
|
||||
protected class MultithreadedImportWebScriptLogic
|
||||
{
|
||||
private final MultiThreadedBulkFilesystemImporter bulkImporter;
|
||||
private final Supplier<NodeImporter> nodeImporterFactory;
|
||||
private final WebScriptRequest request;
|
||||
private final Status status;
|
||||
private final Cache cache;
|
||||
|
||||
public MultithreadedImportWebScriptLogic(MultiThreadedBulkFilesystemImporter bulkImporter, Supplier<NodeImporter> nodeImporterFactory, WebScriptRequest request, Status status, Cache cache)
|
||||
{
|
||||
this.bulkImporter = Objects.requireNonNull(bulkImporter);
|
||||
this.nodeImporterFactory = Objects.requireNonNull(nodeImporterFactory);
|
||||
this.request = Objects.requireNonNull(request);
|
||||
this.status = Objects.requireNonNull(status);
|
||||
this.cache = Objects.requireNonNull(cache);
|
||||
}
|
||||
|
||||
public Map<String, Object> executeImport()
|
||||
{
|
||||
Map<String, Object> model = new HashMap<>();
|
||||
cache.setNeverCache(true);
|
||||
String targetPath = null;
|
||||
|
||||
try
|
||||
{
|
||||
targetPath = request.getParameter(PARAMETER_TARGET_PATH);
|
||||
if (isRunning())
|
||||
{
|
||||
model.put(IMPORT_ALREADY_IN_PROGRESS_MODEL_KEY, I18NUtil.getMessage(IMPORT_ALREADY_IN_PROGRESS_ERROR_KEY));
|
||||
return model;
|
||||
}
|
||||
|
||||
final BulkImportParameters bulkImportParameters = getBulkImportParameters();
|
||||
final NodeImporter nodeImporter = nodeImporterFactory.get();
|
||||
|
||||
bulkImporter.asyncBulkImport(bulkImportParameters, nodeImporter);
|
||||
|
||||
waitForImportToBegin();
|
||||
|
||||
// redirect to the status Web Script
|
||||
status.setCode(Status.STATUS_MOVED_TEMPORARILY);
|
||||
status.setRedirect(true);
|
||||
status.setLocation(request.getServiceContextPath() + WEB_SCRIPT_URI_BULK_FILESYSTEM_IMPORT_STATUS);
|
||||
}
|
||||
catch (WebScriptException | IllegalArgumentException e)
|
||||
{
|
||||
status.setCode(Status.STATUS_BAD_REQUEST, e.getMessage());
|
||||
status.setRedirect(true);
|
||||
}
|
||||
catch (FileNotFoundException fnfe)
|
||||
{
|
||||
status.setCode(Status.STATUS_BAD_REQUEST,"The repository path '" + targetPath + "' does not exist !");
|
||||
status.setRedirect(true);
|
||||
}
|
||||
catch (Throwable t)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_INTERNAL_SERVER_ERROR, buildTextMessage(t), t);
|
||||
}
|
||||
|
||||
return model;
|
||||
}
|
||||
|
||||
private void waitForImportToBegin() throws InterruptedException
|
||||
{
|
||||
// ACE-3047 fix, since bulk import is started asynchronously there is a chance that client
|
||||
// will get into the status page before import is actually started.
|
||||
// In this case wrong information (for previous import) will be displayed.
|
||||
// So lets ensure that import started before redirecting client to status page.
|
||||
int i = 0;
|
||||
while (!bulkImporter.getStatus().inProgress() && i < 10)
|
||||
{
|
||||
Thread.sleep(100);
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
private BulkImportParameters getBulkImportParameters() throws FileNotFoundException
|
||||
{
|
||||
final BulkImportParametersExtractor extractor = new BulkImportParametersExtractor(request::getParameter,
|
||||
AbstractBulkFileSystemImportWebScript.this::getTargetNodeRef,
|
||||
bulkImporter.getDefaultBatchSize(),
|
||||
bulkImporter.getDefaultNumThreads());
|
||||
return extractor.extract();
|
||||
}
|
||||
|
||||
private boolean isRunning()
|
||||
{
|
||||
return bulkImporter.getStatus().inProgress();
|
||||
}
|
||||
}
|
||||
|
||||
protected static class BulkImportParametersExtractor
|
||||
{
|
||||
private final Function<String, String> paramsProvider;
|
||||
private final NodeRefCreator nodeRefCreator;
|
||||
private final int defaultBatchSize;
|
||||
private final int defaultNumThreads;
|
||||
|
||||
public BulkImportParametersExtractor(final Function<String, String> paramsProvider, final NodeRefCreator nodeRefCreator,
|
||||
final int defaultBatchSize, final int defaultNumThreads)
|
||||
{
|
||||
this.paramsProvider = Objects.requireNonNull(paramsProvider);
|
||||
this.nodeRefCreator = Objects.requireNonNull(nodeRefCreator);
|
||||
this.defaultBatchSize = defaultBatchSize;
|
||||
this.defaultNumThreads = defaultNumThreads;
|
||||
}
|
||||
|
||||
public BulkImportParameters extract() throws FileNotFoundException
|
||||
{
|
||||
BulkImportParameters result = new BulkImportParameters();
|
||||
|
||||
result.setTarget(getTargetNodeRef());
|
||||
setExistingFileMode(result);
|
||||
result.setNumThreads(getOptionalPositiveInteger(PARAMETER_NUM_THREADS).orElse(defaultNumThreads));
|
||||
result.setBatchSize(getOptionalPositiveInteger(PARAMETER_BATCH_SIZE).orElse(defaultBatchSize));
|
||||
setDisableRules(result);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private void setExistingFileMode(BulkImportParameters params)
|
||||
{
|
||||
String replaceExistingStr = getParamStringValue(PARAMETER_REPLACE_EXISTING);
|
||||
String existingFileModeStr = getParamStringValue(PARAMETER_EXISTING_FILE_MODE);
|
||||
|
||||
if (!isNullOrEmpty(replaceExistingStr) && !isNullOrEmpty(existingFileModeStr))
|
||||
{
|
||||
// Check that we haven't had both the deprecated and new (existingFileMode)
|
||||
// parameters supplied.
|
||||
throw new IllegalStateException(
|
||||
String.format("Only one of these parameters may be used, not both: %s, %s",
|
||||
PARAMETER_REPLACE_EXISTING,
|
||||
PARAMETER_EXISTING_FILE_MODE));
|
||||
}
|
||||
|
||||
if (!isNullOrEmpty(existingFileModeStr))
|
||||
{
|
||||
params.setExistingFileMode(BulkImportParameters.ExistingFileMode.valueOf(existingFileModeStr));
|
||||
}
|
||||
else
|
||||
{
|
||||
params.setReplaceExisting(PARAMETER_VALUE_REPLACE_EXISTING.equals(replaceExistingStr));
|
||||
}
|
||||
}
|
||||
|
||||
private void setDisableRules(final BulkImportParameters params)
|
||||
{
|
||||
final String disableRulesStr = getParamStringValue(PARAMETER_DISABLE_RULES);
|
||||
params.setDisableRulesService(!isNullOrEmpty(disableRulesStr) && PARAMETER_VALUE_DISABLE_RULES.equals(disableRulesStr));
|
||||
}
|
||||
|
||||
private NodeRef getTargetNodeRef() throws FileNotFoundException
|
||||
{
|
||||
String targetNodeRefStr = getParamStringValue(PARAMETER_TARGET_NODEREF);
|
||||
String targetPath = getParamStringValue(PARAMETER_TARGET_PATH);
|
||||
return nodeRefCreator.fromNodeRefAndPath(targetNodeRefStr, targetPath);
|
||||
}
|
||||
|
||||
private OptionalInt getOptionalPositiveInteger(final String paramName)
|
||||
{
|
||||
final String strValue = getParamStringValue(paramName);
|
||||
if (isNullOrEmpty(strValue))
|
||||
{
|
||||
return OptionalInt.empty();
|
||||
}
|
||||
|
||||
final Integer asInt = Ints.tryParse(strValue);
|
||||
if (asInt == null || asInt < 1)
|
||||
{
|
||||
throw new WebScriptException("Error: parameter '" + paramName + "' must be an integer > 0.");
|
||||
}
|
||||
|
||||
return OptionalInt.of(asInt);
|
||||
}
|
||||
|
||||
private String getParamStringValue(String paramName)
|
||||
{
|
||||
Objects.requireNonNull(paramName);
|
||||
|
||||
return paramsProvider.apply(paramName);
|
||||
}
|
||||
|
||||
private boolean isNullOrEmpty(String str)
|
||||
{
|
||||
return str == null || str.trim().length() == 0;
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
protected interface NodeRefCreator
|
||||
{
|
||||
NodeRef fromNodeRefAndPath(String nodeRef, String path) throws FileNotFoundException;
|
||||
}
|
||||
}
|
||||
|
||||
}
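The extractor boils down to a Function<String, String> parameter source plus defaults and validation, which makes the parsing rules easy to exercise without a WebScriptRequest. A reduced, self-contained sketch of the same idea (illustrative names only, not the Alfresco classes):

// Illustrative: the 'optional positive integer with a default' rule used for batchSize/numThreads.
import java.util.Map;
import java.util.OptionalInt;
import java.util.function.Function;

final class ParamExtractorSketch
{
    private final Function<String, String> params;
    private final int defaultBatchSize;

    ParamExtractorSketch(Function<String, String> params, int defaultBatchSize)
    {
        this.params = params;
        this.defaultBatchSize = defaultBatchSize;
    }

    int batchSize()
    {
        return positiveInt("batchSize").orElse(defaultBatchSize);
    }

    private OptionalInt positiveInt(String name)
    {
        String raw = params.apply(name);
        if (raw == null || raw.trim().isEmpty())
        {
            return OptionalInt.empty();          // absent: caller falls back to the default
        }
        int value = Integer.parseInt(raw.trim());
        if (value < 1)
        {
            throw new IllegalArgumentException("'" + name + "' must be an integer > 0");
        }
        return OptionalInt.of(value);
    }

    public static void main(String[] args)
    {
        System.out.println(new ParamExtractorSketch(Map.of("batchSize", "50")::get, 100).batchSize()); // 50
        System.out.println(new ParamExtractorSketch(key -> null, 100).batchSize());                    // 100
    }
}
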
|
@@ -27,17 +27,12 @@
|
||||
package org.alfresco.repo.web.scripts.bulkimport.copy;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.bulkimport.BulkImportParameters;
|
||||
import org.alfresco.repo.bulkimport.NodeImporter;
|
||||
import org.alfresco.repo.bulkimport.impl.MultiThreadedBulkFilesystemImporter;
|
||||
import org.alfresco.repo.bulkimport.impl.StreamingNodeImporterFactory;
|
||||
import org.alfresco.repo.web.scripts.bulkimport.AbstractBulkFileSystemImportWebScript;
|
||||
import org.alfresco.service.cmr.model.FileNotFoundException;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.springframework.extensions.surf.util.I18NUtil;
|
||||
import org.springframework.extensions.webscripts.Cache;
|
||||
import org.springframework.extensions.webscripts.Status;
|
||||
import org.springframework.extensions.webscripts.WebScriptException;
|
||||
@@ -69,170 +64,22 @@ public class BulkFilesystemImportWebScript extends AbstractBulkFileSystemImportW
|
||||
@Override
|
||||
protected Map<String, Object> executeImpl(final WebScriptRequest request, final Status status, final Cache cache)
|
||||
{
|
||||
Map<String, Object> model = new HashMap<String, Object>();
|
||||
String targetNodeRefStr = null;
|
||||
String targetPath = null;
|
||||
String sourceDirectoryStr = null;
|
||||
@Deprecated String replaceExistingStr = null;
|
||||
String existingFileModeStr = null;
|
||||
String batchSizeStr = null;
|
||||
String numThreadsStr = null;
|
||||
String disableRulesStr = null;
|
||||
final MultithreadedImportWebScriptLogic importLogic = new MultithreadedImportWebScriptLogic(bulkImporter,
|
||||
() -> createNodeImporter(request), request, status, cache);
|
||||
return importLogic.executeImport();
|
||||
}
|
||||
|
||||
cache.setNeverCache(true);
|
||||
|
||||
try
|
||||
private NodeImporter createNodeImporter(WebScriptRequest request)
|
||||
{
|
||||
final String sourceDirectoryStr = request.getParameter(PARAMETER_SOURCE_DIRECTORY);
|
||||
if (sourceDirectoryStr == null || sourceDirectoryStr.trim().length() == 0)
|
||||
{
|
||||
if(!bulkImporter.getStatus().inProgress())
|
||||
{
|
||||
NodeRef targetNodeRef = null;
|
||||
File sourceDirectory = null;
|
||||
boolean replaceExisting = false;
|
||||
BulkImportParameters.ExistingFileMode existingFileMode = null;
|
||||
int batchSize = bulkImporter.getDefaultBatchSize();
|
||||
int numThreads = bulkImporter.getDefaultNumThreads();
|
||||
boolean disableRules = false;
|
||||
|
||||
// Retrieve, validate and convert parameters
|
||||
targetNodeRefStr = request.getParameter(PARAMETER_TARGET_NODEREF);
|
||||
targetPath = request.getParameter(PARAMETER_TARGET_PATH);
|
||||
sourceDirectoryStr = request.getParameter(PARAMETER_SOURCE_DIRECTORY);
|
||||
replaceExistingStr = request.getParameter(PARAMETER_REPLACE_EXISTING);
|
||||
existingFileModeStr = request.getParameter(PARAMETER_EXISTING_FILE_MODE);
|
||||
|
||||
batchSizeStr = request.getParameter(PARAMETER_BATCH_SIZE);
|
||||
numThreadsStr = request.getParameter(PARAMETER_NUM_THREADS);
|
||||
disableRulesStr = request.getParameter(PARAMETER_DISABLE_RULES);
|
||||
|
||||
targetNodeRef = getTargetNodeRef(targetNodeRefStr, targetPath);
|
||||
|
||||
if (sourceDirectoryStr == null || sourceDirectoryStr.trim().length() == 0)
|
||||
{
|
||||
throw new RuntimeException("Error: mandatory parameter '" + PARAMETER_SOURCE_DIRECTORY + "' was not provided.");
|
||||
}
|
||||
|
||||
sourceDirectory = new File(sourceDirectoryStr.trim());
|
||||
|
||||
if (replaceExistingStr != null && existingFileModeStr != null)
|
||||
{
|
||||
// Check that we haven't had both the deprecated and new (existingFileMode)
|
||||
// parameters supplied.
|
||||
throw new IllegalStateException(
|
||||
String.format("Only one of these parameters may be used, not both: %s, %s",
|
||||
PARAMETER_REPLACE_EXISTING,
|
||||
PARAMETER_EXISTING_FILE_MODE));
|
||||
}
|
||||
|
||||
if (replaceExistingStr != null && replaceExistingStr.trim().length() > 0)
|
||||
{
|
||||
replaceExisting = PARAMETER_VALUE_REPLACE_EXISTING.equals(replaceExistingStr);
|
||||
}
|
||||
|
||||
if (existingFileModeStr != null && existingFileModeStr.trim().length() > 0)
|
||||
{
|
||||
existingFileMode = BulkImportParameters.ExistingFileMode.valueOf(existingFileModeStr);
|
||||
}
|
||||
|
||||
if (disableRulesStr != null && disableRulesStr.trim().length() > 0)
|
||||
{
|
||||
disableRules = PARAMETER_VALUE_DISABLE_RULES.equals(disableRulesStr);
|
||||
}
|
||||
|
||||
// Initiate the import
|
||||
NodeImporter nodeImporter = nodeImporterFactory.getNodeImporter(sourceDirectory);
|
||||
BulkImportParameters bulkImportParameters = new BulkImportParameters();
|
||||
|
||||
if (numThreadsStr != null && numThreadsStr.trim().length() > 0)
|
||||
{
|
||||
try
|
||||
{
|
||||
numThreads = Integer.parseInt(numThreadsStr);
|
||||
if(numThreads < 1)
|
||||
{
|
||||
throw new RuntimeException("Error: parameter '" + PARAMETER_NUM_THREADS + "' must be an integer > 0.");
|
||||
}
|
||||
bulkImportParameters.setNumThreads(numThreads);
|
||||
}
|
||||
catch(NumberFormatException e)
|
||||
{
|
||||
throw new RuntimeException("Error: parameter '" + PARAMETER_NUM_THREADS + "' must be an integer > 0.");
|
||||
}
|
||||
}
|
||||
|
||||
if (batchSizeStr != null && batchSizeStr.trim().length() > 0)
|
||||
{
|
||||
try
|
||||
{
|
||||
batchSize = Integer.parseInt(batchSizeStr);
|
||||
if(batchSize < 1)
|
||||
{
|
||||
throw new RuntimeException("Error: parameter '" + PARAMETER_BATCH_SIZE + "' must be an integer > 0.");
|
||||
}
|
||||
bulkImportParameters.setBatchSize(batchSize);
|
||||
}
|
||||
catch(NumberFormatException e)
|
||||
{
|
||||
throw new RuntimeException("Error: parameter '" + PARAMETER_BATCH_SIZE + "' must be an integer > 0.");
|
||||
}
|
||||
}
|
||||
|
||||
if (existingFileMode != null)
|
||||
{
|
||||
bulkImportParameters.setExistingFileMode(existingFileMode);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Fall back to the old/deprecated way.
|
||||
bulkImportParameters.setReplaceExisting(replaceExisting);
|
||||
}
|
||||
|
||||
bulkImportParameters.setTarget(targetNodeRef);
|
||||
bulkImportParameters.setDisableRulesService(disableRules);
|
||||
|
||||
bulkImporter.asyncBulkImport(bulkImportParameters, nodeImporter);
|
||||
|
||||
// ACE-3047 fix, since bulk import is started asynchronously there is a chance that client
|
||||
// will get into the status page before import is actually started.
|
||||
// In this case wrong information (for previous import) will be displayed.
|
||||
// So lets ensure that import started before redirecting client to status page.
|
||||
int i = 0;
|
||||
while (!bulkImporter.getStatus().inProgress() && i < 10)
|
||||
{
|
||||
Thread.sleep(100);
|
||||
i++;
|
||||
}
|
||||
|
||||
// redirect to the status Web Script
|
||||
status.setCode(Status.STATUS_MOVED_TEMPORARILY);
|
||||
status.setRedirect(true);
|
||||
status.setLocation(request.getServiceContextPath() + WEB_SCRIPT_URI_BULK_FILESYSTEM_IMPORT_STATUS);
|
||||
}
|
||||
else
|
||||
{
|
||||
model.put(IMPORT_ALREADY_IN_PROGRESS_MODEL_KEY, I18NUtil.getMessage(IMPORT_ALREADY_IN_PROGRESS_ERROR_KEY));
|
||||
}
|
||||
throw new WebScriptException("Error: mandatory parameter '" + PARAMETER_SOURCE_DIRECTORY + "' was not provided.");
|
||||
}
|
||||
catch (WebScriptException wse)
|
||||
{
|
||||
status.setCode(Status.STATUS_BAD_REQUEST, wse.getMessage());
|
||||
status.setRedirect(true);
|
||||
}
|
||||
catch (FileNotFoundException fnfe)
|
||||
{
|
||||
status.setCode(Status.STATUS_BAD_REQUEST,"The repository path '" + targetPath + "' does not exist !");
|
||||
status.setRedirect(true);
|
||||
}
|
||||
catch(IllegalArgumentException iae)
|
||||
{
|
||||
status.setCode(Status.STATUS_BAD_REQUEST,iae.getMessage());
|
||||
status.setRedirect(true);
|
||||
}
|
||||
catch (Throwable t)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_INTERNAL_SERVER_ERROR, buildTextMessage(t), t);
|
||||
}
|
||||
|
||||
return model;
|
||||
|
||||
final File sourceDirectory = new File(sourceDirectoryStr.trim());
|
||||
|
||||
return nodeImporterFactory.getNodeImporter(sourceDirectory);
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -23,149 +23,144 @@
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.web.scripts.model.filefolder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
||||
import org.alfresco.repo.model.filefolder.FileFolderLoader;
|
||||
import org.alfresco.service.cmr.model.FileNotFoundException;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.extensions.webscripts.AbstractWebScript;
|
||||
import org.springframework.extensions.webscripts.Status;
|
||||
import org.springframework.extensions.webscripts.WebScriptException;
|
||||
import org.springframework.extensions.webscripts.WebScriptRequest;
|
||||
import org.springframework.extensions.webscripts.WebScriptResponse;
|
||||
|
||||
/**
|
||||
* Link to {@link FileFolderLoader}
|
||||
*/
|
||||
public class FileFolderLoaderPost extends AbstractWebScript implements ApplicationContextAware
|
||||
{
|
||||
public static final String KEY_FOLDER_PATH = "folderPath";
|
||||
public static final String KEY_FILE_COUNT = "fileCount";
|
||||
public static final String KEY_FILES_PER_TXN = "filesPerTxn";
|
||||
public static final String KEY_MIN_FILE_SIZE = "minFileSize";
|
||||
public static final String KEY_MAX_FILE_SIZE = "maxFileSize";
|
||||
public static final String KEY_MAX_UNIQUE_DOCUMENTS = "maxUniqueDocuments";
|
||||
public static final String KEY_FORCE_BINARY_STORAGE = "forceBinaryStorage";
|
||||
public static final String KEY_DESCRIPTION_COUNT = "descriptionCount";
|
||||
public static final String KEY_DESCRIPTION_SIZE = "descriptionSize";
|
||||
public static final String KEY_COUNT = "count";
|
||||
|
||||
public static final int DEFAULT_FILE_COUNT = 100;
|
||||
public static final int DEFAULT_FILES_PER_TXN = 100;
|
||||
public static final long DEFAULT_MIN_FILE_SIZE = 80*1024L;
|
||||
public static final long DEFAULT_MAX_FILE_SIZE = 120*1024L;
|
||||
public static final long DEFAULT_MAX_UNIQUE_DOCUMENTS = Long.MAX_VALUE;
|
||||
public static final int DEFAULT_DESCRIPTION_COUNT = 1;
|
||||
public static final long DEFAULT_DESCRIPTION_SIZE = 128L;
|
||||
public static final boolean DEFAULT_FORCE_BINARY_STORAGE = false;
|
||||
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
|
||||
{
|
||||
this.applicationContext = applicationContext;
|
||||
}
|
||||
|
||||
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException
|
||||
{
|
||||
FileFolderLoader loader = (FileFolderLoader) applicationContext.getBean("fileFolderLoader");
|
||||
|
||||
int count = 0;
|
||||
String folderPath = "";
|
||||
try
|
||||
{
|
||||
JSONObject json = new JSONObject(new JSONTokener(req.getContent().getContent()));
|
||||
folderPath = json.getString(KEY_FOLDER_PATH);
|
||||
if (folderPath == null)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_BAD_REQUEST, KEY_FOLDER_PATH + " not supplied.");
|
||||
}
|
||||
int fileCount = 100;
|
||||
if (json.has(KEY_FILE_COUNT))
|
||||
{
|
||||
fileCount = json.getInt(KEY_FILE_COUNT);
|
||||
}
|
||||
int filesPerTxn = DEFAULT_FILES_PER_TXN;
|
||||
if (json.has(KEY_FILES_PER_TXN))
|
||||
{
|
||||
filesPerTxn = json.getInt(KEY_FILES_PER_TXN);
|
||||
}
|
||||
long minFileSize = DEFAULT_MIN_FILE_SIZE;
|
||||
if (json.has(KEY_MIN_FILE_SIZE))
|
||||
{
|
||||
minFileSize = json.getInt(KEY_MIN_FILE_SIZE);
|
||||
}
|
||||
long maxFileSize = DEFAULT_MAX_FILE_SIZE;
|
||||
if (json.has(KEY_MAX_FILE_SIZE))
|
||||
{
|
||||
maxFileSize = json.getInt(KEY_MAX_FILE_SIZE);
|
||||
}
|
||||
long maxUniqueDocuments = DEFAULT_MAX_UNIQUE_DOCUMENTS;
|
||||
if (json.has(KEY_MAX_UNIQUE_DOCUMENTS))
|
||||
{
|
||||
maxUniqueDocuments = json.getInt(KEY_MAX_UNIQUE_DOCUMENTS);
|
||||
}
|
||||
boolean forceBinaryStorage = DEFAULT_FORCE_BINARY_STORAGE;
|
||||
if (json.has(KEY_FORCE_BINARY_STORAGE))
|
||||
{
|
||||
forceBinaryStorage = json.getBoolean(KEY_FORCE_BINARY_STORAGE);
|
||||
}
|
||||
int descriptionCount = DEFAULT_DESCRIPTION_COUNT;
|
||||
if (json.has(KEY_DESCRIPTION_COUNT))
|
||||
{
|
||||
descriptionCount = json.getInt(KEY_DESCRIPTION_COUNT);
|
||||
}
|
||||
long descriptionSize = DEFAULT_DESCRIPTION_SIZE;
|
||||
if (json.has(KEY_DESCRIPTION_SIZE))
|
||||
{
|
||||
descriptionSize = json.getLong(KEY_DESCRIPTION_SIZE);
|
||||
}
|
||||
|
||||
// Perform the load
|
||||
count = loader.createFiles(
|
||||
folderPath,
|
||||
fileCount, filesPerTxn,
|
||||
minFileSize, maxFileSize,
|
||||
maxUniqueDocuments,
|
||||
forceBinaryStorage,
|
||||
descriptionCount, descriptionSize);
|
||||
}
|
||||
catch (FileNotFoundException e)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_NOT_FOUND, "Folder not found: ", folderPath);
|
||||
}
|
||||
catch (IOException iox)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_BAD_REQUEST, "Could not read content from req.", iox);
|
||||
}
|
||||
catch (JSONException je)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_BAD_REQUEST, "Could not parse JSON from req.", je);
|
||||
}
|
||||
// Write the response
|
||||
OutputStream os = res.getOutputStream();
|
||||
try
|
||||
{
|
||||
JSONObject json = new JSONObject();
|
||||
json.put(KEY_COUNT, count);
|
||||
os.write(json.toString().getBytes("UTF-8"));
|
||||
}
|
||||
catch (JSONException e)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_INTERNAL_SERVER_ERROR, "Failed to write JSON", e);
|
||||
}
|
||||
finally
|
||||
{
|
||||
os.close();
|
||||
}
|
||||
}
|
||||
}
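For reference, this script only insists on folderPath in the posted JSON; every other key falls back to the DEFAULT_* values above. A sketch of assembling such a request body with org.json (the folder path and counts are example values):

// Illustrative request body for FileFolderLoaderPost; only folderPath is mandatory.
import org.json.JSONObject;

public class LoaderRequestExample
{
    public static void main(String[] args) throws Exception
    {
        JSONObject body = new JSONObject();
        body.put("folderPath", "/Company Home/Sites/example/documentLibrary"); // example path
        body.put("fileCount", 500);          // defaults to 100 when omitted
        body.put("filesPerTxn", 50);         // defaults to 100
        body.put("minFileSize", 80 * 1024);  // bytes
        body.put("maxFileSize", 120 * 1024);
        body.put("forceBinaryStorage", false);

        System.out.println(body.toString(2)); // pretty-printed with a two-space indent
    }
}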
if (json.has(KEY_MAX_FILE_SIZE))
|
||||
{
|
||||
maxFileSize = json.getInt(KEY_MAX_FILE_SIZE);
|
||||
}
|
||||
long maxUniqueDocuments = DEFAULT_MAX_UNIQUE_DOCUMENTS;
|
||||
if (json.has(KEY_MAX_UNIQUE_DOCUMENTS))
|
||||
{
|
||||
maxUniqueDocuments = json.getInt(KEY_MAX_UNIQUE_DOCUMENTS);
|
||||
}
|
||||
boolean forceBinaryStorage = DEFAULT_FORCE_BINARY_STORAGE;
|
||||
if (json.has(KEY_FORCE_BINARY_STORAGE))
|
||||
{
|
||||
forceBinaryStorage = json.getBoolean(KEY_FORCE_BINARY_STORAGE);
|
||||
}
|
||||
int descriptionCount = DEFAULT_DESCRIPTION_COUNT;
|
||||
if (json.has(KEY_DESCRIPTION_COUNT))
|
||||
{
|
||||
descriptionCount = json.getInt(KEY_DESCRIPTION_COUNT);
|
||||
}
|
||||
long descriptionSize = DEFAULT_DESCRIPTION_SIZE;
|
||||
if (json.has(KEY_DESCRIPTION_SIZE))
|
||||
{
|
||||
descriptionSize = json.getLong(KEY_DESCRIPTION_SIZE);
|
||||
}
|
||||
|
||||
// Perform the load
|
||||
count = loader.createFiles(
|
||||
folderPath,
|
||||
fileCount, filesPerTxn,
|
||||
minFileSize, maxFileSize,
|
||||
maxUniqueDocuments,
|
||||
forceBinaryStorage,
|
||||
descriptionCount, descriptionSize);
|
||||
}
|
||||
catch (FileNotFoundException e)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_NOT_FOUND, "Folder not found: ", folderPath);
|
||||
}
|
||||
catch (IOException iox)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_BAD_REQUEST, "Could not read content from req.", iox);
|
||||
}
|
||||
catch (JSONException je)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_BAD_REQUEST, "Could not parse JSON from req.", je);
|
||||
}
|
||||
// Write the response
|
||||
try (OutputStream os = res.getOutputStream())
|
||||
{
|
||||
JSONObject json = new JSONObject();
|
||||
json.put(KEY_COUNT, count);
|
||||
os.write(json.toString().getBytes("UTF-8"));
|
||||
}
|
||||
catch (JSONException e)
|
||||
{
|
||||
throw new WebScriptException(Status.STATUS_INTERNAL_SERVER_ERROR, "Failed to write JSON", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -125,16 +125,15 @@ public class PostSnapshotCommandProcessor implements CommandProcessor
|
||||
|
||||
logger.debug("success");
|
||||
resp.setStatus(Status.STATUS_OK);
|
||||
|
||||
OutputStream out = resp.getOutputStream();
|
||||
resp.setContentType("text/xml");
|
||||
resp.setContentEncoding("utf-8");
|
||||
|
||||
receiver.generateRequsite(transferId, out);
|
||||
|
||||
out.close();
|
||||
|
||||
}
|
||||
|
||||
try (OutputStream out = resp.getOutputStream())
|
||||
{
|
||||
resp.setContentType("text/xml");
|
||||
resp.setContentEncoding("utf-8");
|
||||
|
||||
receiver.generateRequsite(transferId, out);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.debug("exception caught", ex);
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -34,6 +34,7 @@ import org.alfresco.rest.api.model.UserInfo;
|
||||
import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
|
||||
/**
|
||||
* Handles trashcan / deleted nodes
|
||||
@@ -99,4 +100,29 @@ public interface DeletedNodes
|
||||
* @return
|
||||
*/
|
||||
CollectionWithPagingInfo<Rendition> getRenditions(String archivedId, Parameters parameters);
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
*
|
||||
* @param archivedId The node id for which to obtain the direct access {@code URL}
|
||||
* @param renditionId The rendition id for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}, {@code true} by default.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(String archivedId, String renditionId, boolean attachment)
|
||||
{
|
||||
return requestContentDirectUrl(archivedId, renditionId, attachment, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
*
|
||||
* @param archivedId The node id for which to obtain the direct access {@code URL}
|
||||
* @param renditionId The rendition id for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}, {@code true} by default.
|
||||
* @param validFor The time at which the direct access {@code URL} will expire.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
DirectAccessUrl requestContentDirectUrl(String archivedId, String renditionId, boolean attachment, Long validFor);
|
||||
|
||||
}
|
||||
|
@@ -0,0 +1,66 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.rest.api;
|
||||
|
||||
import org.alfresco.rest.api.impl.directurl.RestApiDirectUrlConfig;
|
||||
import org.alfresco.rest.api.model.DirectAccessUrlRequest;
|
||||
import org.alfresco.rest.framework.core.exceptions.DisabledServiceException;
|
||||
import org.apache.commons.lang3.BooleanUtils;
|
||||
|
||||
/**
|
||||
* Helper class for retrieving direct access URLs options.
|
||||
*
|
||||
* @author Sara Aspery
|
||||
*/
|
||||
public class DirectAccessUrlHelper
|
||||
{
|
||||
private RestApiDirectUrlConfig restApiDirectUrlConfig;
|
||||
|
||||
public void setRestApiDirectUrlConfig(RestApiDirectUrlConfig restApiDirectUrlConfig)
|
||||
{
|
||||
this.restApiDirectUrlConfig = restApiDirectUrlConfig;
|
||||
}
|
||||
|
||||
public Long getDefaultExpiryTimeInSec()
|
||||
{
|
||||
if (restApiDirectUrlConfig ==null || !restApiDirectUrlConfig.isEnabled())
|
||||
{
|
||||
throw new DisabledServiceException("Direct access url isn't available.");
|
||||
}
|
||||
|
||||
return restApiDirectUrlConfig.getDefaultExpiryTimeInSec();
|
||||
}
|
||||
|
||||
public boolean getAttachment(DirectAccessUrlRequest directAccessUrlRequest)
|
||||
{
|
||||
boolean attachment = true;
|
||||
if (directAccessUrlRequest != null )
|
||||
{
|
||||
attachment = BooleanUtils.toBooleanDefaultIfNull(directAccessUrlRequest.isAttachment(), true);
|
||||
}
|
||||
return attachment;
|
||||
}
|
||||
}
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2017 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -44,6 +44,7 @@ import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
@@ -266,6 +267,49 @@ public interface Nodes
|
||||
*/
|
||||
Node unlock(String nodeId, Parameters parameters);
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeId The node id for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(String nodeId, boolean attachment)
|
||||
{
|
||||
return requestContentDirectUrl(validateNode(nodeId), attachment);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeRef The node reference for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, boolean attachment)
|
||||
{
|
||||
return requestContentDirectUrl(nodeRef, attachment, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeId The node id for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}.
|
||||
* @param validFor The time at which the direct access {@code URL} will expire.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(String nodeId, boolean attachment, Long validFor)
|
||||
{
|
||||
return requestContentDirectUrl(validateNode(nodeId), attachment, validFor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeRef The node reference for which to obtain the direct access {@code URL}
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}.
|
||||
* @param validFor The time at which the direct access {@code URL} will expire.
|
||||
* @return A direct access {@code URL} object for the content.
|
||||
*/
|
||||
DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, boolean attachment, Long validFor);
|
||||
|
||||
/**
|
||||
* Convert from node properties (map of QName to Serializable) retrieved from
|
||||
* the respository to a map of String to Object that can be formatted/expressed
|
||||
@@ -375,7 +419,6 @@ public interface Nodes
|
||||
|
||||
String PARAM_INCLUDE_ASSOCIATION = "association";
|
||||
String PARAM_INCLUDE_DEFINITION = "definition";
|
||||
String PARAM_INCLUDE_STORAGECLASSES = "storageClasses";
|
||||
|
||||
String PARAM_ISFOLDER = "isFolder";
|
||||
String PARAM_ISFILE = "isFile";
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -32,7 +32,9 @@ import org.alfresco.rest.framework.core.exceptions.NotFoundException;
|
||||
import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@@ -186,5 +188,58 @@ public interface Renditions
|
||||
* @return the rendition stream
|
||||
*/
|
||||
BinaryResource getContentNoValidation(NodeRef nodeRef, String versionId, String renditionId, Parameters parameters);
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeId the node id for which to obtain the direct access {@code URL}
|
||||
* @param versionId the version id (aka version label)
|
||||
* @param renditionId the rendition id
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}
|
||||
* @return a direct access {@code URL} object for the content
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(String nodeId, String versionId, String renditionId, boolean attachment)
|
||||
{
|
||||
NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, nodeId);
|
||||
return requestContentDirectUrl(nodeRef, versionId, renditionId, attachment);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeId the node id for which to obtain the direct access {@code URL}
|
||||
* @param versionId the version id (aka version label)
|
||||
* @param renditionId the rendition id
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}
|
||||
* @param validFor the time at which the direct access {@code URL} will expire
|
||||
* @return a direct access {@code URL} object for the content
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(String nodeId, String versionId, String renditionId, boolean attachment, Long validFor)
|
||||
{
|
||||
NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, nodeId);
|
||||
return requestContentDirectUrl(nodeRef, versionId, renditionId, attachment, validFor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeRef the node reference for which to obtain the direct access {@code URL}
|
||||
* @param versionId the version id (aka version label)
|
||||
* @param renditionId the rendition id
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}
|
||||
* @return a direct access {@code URL} object for the content.
|
||||
*/
|
||||
default DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, String versionId, String renditionId, boolean attachment)
|
||||
{
|
||||
return requestContentDirectUrl(nodeRef, versionId, renditionId, attachment, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a presigned URL to directly access content.
|
||||
* @param nodeRef the node reference for which to obtain the direct access {@code URL}
|
||||
* @param versionId the version id (aka version label)
|
||||
* @param renditionId the rendition id
|
||||
* @param attachment {@code true} if an attachment {@code URL} is requested, {@code false} for an embedded {@code URL}
|
||||
* @param validFor the time at which the direct access {@code URL} will expire
|
||||
* @return a direct access {@code URL} object for the content.
|
||||
*/
|
||||
DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, String versionId, String renditionId, boolean attachment, Long validFor);
|
||||
}
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -25,6 +25,7 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.discovery;
|
||||
|
||||
import org.alfresco.rest.api.impl.directurl.RestApiDirectUrlConfig;
|
||||
import org.alfresco.rest.api.model.DiscoveryDetails;
|
||||
import org.alfresco.rest.api.model.ModulePackage;
|
||||
import org.alfresco.rest.api.model.RepositoryInfo;
|
||||
@@ -41,6 +42,7 @@ import org.alfresco.service.cmr.audit.AuditService;
|
||||
import org.alfresco.service.cmr.module.ModuleDetails;
|
||||
import org.alfresco.service.cmr.module.ModuleService;
|
||||
import org.alfresco.service.cmr.quickshare.QuickShareService;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
import org.alfresco.service.cmr.thumbnail.ThumbnailService;
|
||||
import org.alfresco.service.descriptor.Descriptor;
|
||||
import org.alfresco.service.descriptor.DescriptorService;
|
||||
@@ -67,6 +69,8 @@ public class DiscoveryApiWebscript extends AbstractWebScript implements Recogniz
|
||||
private ModuleService moduleService;
|
||||
private ApiAssistant assistant;
|
||||
private ThumbnailService thumbnailService;
|
||||
private RestApiDirectUrlConfig restApiDirectUrlConfig;
|
||||
private ContentService contentService;
|
||||
|
||||
private boolean enabled = true;
|
||||
private final static String DISABLED = "Not Implemented";
|
||||
@@ -106,6 +110,16 @@ public class DiscoveryApiWebscript extends AbstractWebScript implements Recogniz
|
||||
this.thumbnailService = thumbnailService;
|
||||
}
|
||||
|
||||
public void setRestApiDirectUrlConfig(RestApiDirectUrlConfig restApiDirectUrlConfig)
|
||||
{
|
||||
this.restApiDirectUrlConfig = restApiDirectUrlConfig;
|
||||
}
|
||||
|
||||
public void setContentService(ContentService contentService)
|
||||
{
|
||||
this.contentService = contentService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() throws Exception
|
||||
{
|
||||
@@ -116,6 +130,8 @@ public class DiscoveryApiWebscript extends AbstractWebScript implements Recogniz
|
||||
PropertyCheck.mandatory(this, "moduleService", moduleService);
|
||||
PropertyCheck.mandatory(this, "assistant", assistant);
|
||||
PropertyCheck.mandatory(this, "thumbnailService", thumbnailService);
|
||||
PropertyCheck.mandatory(this, "restApiDirectUrlConfig", restApiDirectUrlConfig);
|
||||
PropertyCheck.mandatory(this, "contentService", contentService);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -154,7 +170,8 @@ public class DiscoveryApiWebscript extends AbstractWebScript implements Recogniz
|
||||
.setReadOnly(repoAdminService.getUsage().isReadOnly())
|
||||
.setAuditEnabled(auditService.isAuditEnabled())
|
||||
.setQuickShareEnabled(quickShareService.isQuickShareEnabled())
|
||||
.setThumbnailGenerationEnabled(thumbnailService.getThumbnailsEnabled()));
|
||||
.setThumbnailGenerationEnabled(thumbnailService.getThumbnailsEnabled())
|
||||
.setDirectAccessUrlEnabled(isContentDirectUrlEnabled()));
|
||||
}
|
||||
|
||||
private List<ModulePackage> getModules()
|
||||
@@ -194,4 +211,10 @@ public class DiscoveryApiWebscript extends AbstractWebScript implements Recogniz
|
||||
throw new DisabledServiceException(DISABLED);
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean isContentDirectUrlEnabled()
|
||||
{
|
||||
return (restApiDirectUrlConfig.isEnabled() && contentService.isContentDirectUrlEnabled());
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -1,56 +0,0 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.rest.api.impl;
|
||||
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.alfresco.rest.api.ContentStorageClasses;
|
||||
import org.alfresco.rest.api.model.StorageClass;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Paging;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
|
||||
/**
|
||||
* Centralises access to storage classes functionality
|
||||
*/
|
||||
public class ContentStorageClassesImpl implements ContentStorageClasses
|
||||
{
|
||||
private ContentService contentService;
|
||||
|
||||
public void setContentService(ContentService contentService)
|
||||
{
|
||||
this.contentService = contentService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionWithPagingInfo<StorageClass> getStorageClasses(Paging paging)
|
||||
{
|
||||
Set<String> storageClasses = contentService.getSupportedStorageClasses();
|
||||
return CollectionWithPagingInfo.asPaged(paging, storageClasses.stream().map(StorageClass::new).collect(Collectors.toList()));
|
||||
}
|
||||
}
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -55,6 +55,7 @@ import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
import org.alfresco.rest.framework.tools.RecognizedParamsExtractor;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
@@ -244,4 +245,23 @@ public class DeletedNodesImpl implements DeletedNodes, RecognizedParamsExtractor
|
||||
NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE, archivedId);
|
||||
return renditions.getRenditions(nodeRef, parameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public DirectAccessUrl requestContentDirectUrl(String originalNodeId, String renditionId, boolean attachment, Long validFor)
|
||||
{
|
||||
//First check the node is valid and has been archived.
|
||||
NodeRef validatedNodeRef = nodes.validateNode(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE, originalNodeId);
|
||||
|
||||
if (renditionId != null)
|
||||
{
|
||||
return renditions.requestContentDirectUrl(validatedNodeRef, null, renditionId, attachment, validFor);
|
||||
}
|
||||
else
|
||||
{
|
||||
return nodes.requestContentDirectUrl(validatedNodeRef, attachment, validFor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2019 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -56,8 +56,6 @@ import org.alfresco.repo.action.executer.ContentMetadataExtracter;
|
||||
import org.alfresco.repo.activities.ActivityType;
|
||||
import org.alfresco.repo.content.ContentLimitViolationException;
|
||||
import org.alfresco.repo.content.MimetypeMap;
|
||||
import org.alfresco.repo.content.StorageClassSet;
|
||||
import org.alfresco.repo.content.UnsupportedStorageClassException;
|
||||
import org.alfresco.repo.domain.node.AuditablePropertiesEntity;
|
||||
import org.alfresco.repo.lock.mem.Lifetime;
|
||||
import org.alfresco.repo.model.Repository;
|
||||
@@ -107,7 +105,6 @@ import org.alfresco.rest.framework.core.exceptions.RequestEntityTooLargeExceptio
|
||||
import org.alfresco.rest.framework.core.exceptions.UnsupportedMediaTypeException;
|
||||
import org.alfresco.rest.framework.resource.content.BasicContentInfo;
|
||||
import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.content.ContentInfo;
|
||||
import org.alfresco.rest.framework.resource.content.ContentInfoImpl;
|
||||
import org.alfresco.rest.framework.resource.content.NodeBinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
@@ -142,6 +139,7 @@ import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.cmr.repository.ContentIOException;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
|
||||
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
|
||||
import org.alfresco.service.cmr.repository.MimetypeService;
|
||||
@@ -1051,12 +1049,6 @@ public class NodesImpl implements Nodes
|
||||
node.setNodeType(nodeTypeQName.toPrefixString(namespaceService));
|
||||
node.setPath(pathInfo);
|
||||
|
||||
if (includeParam.contains(PARAM_INCLUDE_STORAGECLASSES) && node.getIsFile()
|
||||
&& node.getContent().getSizeInBytes() > 0)
|
||||
{
|
||||
node.getContent().setStorageClasses(contentService.findStorageClasses(nodeRef));
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
@@ -1886,8 +1878,7 @@ public class NodesImpl implements Nodes
|
||||
if (isContent)
|
||||
{
|
||||
// create empty file node - note: currently will be set to default encoding only (UTF-8)
|
||||
nodeRef = createNewFile(parentNodeRef, nodeName, nodeTypeQName, null, null, props,
|
||||
assocTypeQName, parameters, versionMajor, versionComment);
|
||||
nodeRef = createNewFile(parentNodeRef, nodeName, nodeTypeQName, null, props, assocTypeQName, parameters, versionMajor, versionComment);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -2372,16 +2363,7 @@ public class NodesImpl implements Nodes
|
||||
}
|
||||
|
||||
processNodePermissions(nodeRef, nodeInfo);
|
||||
|
||||
if (nodeInfo.getContent() != null && nodeInfo.getContent().getStorageClasses() != null)
|
||||
{
|
||||
try {
|
||||
contentService.updateStorageClasses(nodeRef, nodeInfo.getContent().getStorageClasses(), null);
|
||||
} catch (UnsupportedStorageClassException usce) {
|
||||
throw new IllegalArgumentException(usce.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return nodeRef;
|
||||
}
|
||||
|
||||
@@ -2786,13 +2768,7 @@ public class NodesImpl implements Nodes
|
||||
behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_VERSIONABLE);
|
||||
try
|
||||
{
|
||||
writeContent(nodeRef,
|
||||
fileName,
|
||||
stream,
|
||||
true,
|
||||
contentInfo instanceof ContentInfo ?
|
||||
((ContentInfo) contentInfo).getStorageClasses() :
|
||||
null);
|
||||
writeContent(nodeRef, fileName, stream, true);
|
||||
|
||||
if ((isVersioned) || (versionMajor != null) || (versionComment != null) )
|
||||
{
|
||||
@@ -2831,17 +2807,10 @@ public class NodesImpl implements Nodes
|
||||
}
|
||||
|
||||
private void writeContent(NodeRef nodeRef, String fileName, InputStream stream, boolean guessEncoding)
|
||||
{
|
||||
writeContent(nodeRef, fileName, stream, guessEncoding, null);
|
||||
}
|
||||
|
||||
private void writeContent(NodeRef nodeRef, String fileName, InputStream stream,
|
||||
boolean guessEncoding, StorageClassSet storageClassSet)
|
||||
{
|
||||
try
|
||||
{
|
||||
ContentWriter writer = contentService
|
||||
.getWriter(nodeRef, ContentModel.PROP_CONTENT, true, storageClassSet);
|
||||
ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
||||
|
||||
String mimeType = mimetypeService.guessMimetype(fileName);
|
||||
if ((mimeType != null) && (!mimeType.equals(MimetypeMap.MIMETYPE_BINARY)))
|
||||
@@ -2968,7 +2937,6 @@ public class NodesImpl implements Nodes
|
||||
String relativePath = null;
|
||||
String renditionNames = null;
|
||||
boolean versioningEnabled = true;
|
||||
String storageClassesParam = null;
|
||||
|
||||
Map<String, Object> qnameStrProps = new HashMap<>();
|
||||
Map<QName, Serializable> properties = null;
|
||||
@@ -3006,10 +2974,6 @@ public class NodesImpl implements Nodes
|
||||
}
|
||||
break;
|
||||
|
||||
case "storageclasses":
|
||||
storageClassesParam = getStringOrNull(field.getValue());
|
||||
break;
|
||||
|
||||
case "overwrite":
|
||||
overwrite = Boolean.valueOf(field.getValue());
|
||||
break;
|
||||
@@ -3078,9 +3042,8 @@ public class NodesImpl implements Nodes
|
||||
parentNodeRef = getOrCreatePath(parentNodeRef, relativePath);
|
||||
final QName assocTypeQName = ContentModel.ASSOC_CONTAINS;
|
||||
final Set<String> renditions = getRequestedRenditions(renditionNames);
|
||||
final StorageClassSet storageClasses = getRequestedStorageClasses(storageClassesParam);
|
||||
|
||||
validateProperties(qnameStrProps, EXCLUDED_NS, Collections.emptyList());
|
||||
validateProperties(qnameStrProps, EXCLUDED_NS, Arrays.asList());
|
||||
try
|
||||
{
|
||||
// Map the given properties, if any.
|
||||
@@ -3106,13 +3069,8 @@ public class NodesImpl implements Nodes
|
||||
else if (overwrite && nodeService.hasAspect(existingFile, ContentModel.ASPECT_VERSIONABLE))
|
||||
{
|
||||
// overwrite existing (versionable) file
|
||||
|
||||
BasicContentInfo contentInfo = new ContentInfoImpl(content.getMimetype(),
|
||||
content.getEncoding(), -1,
|
||||
null, storageClasses);
|
||||
return updateExistingFile(parentNodeRef, existingFile, fileName, contentInfo,
|
||||
content.getInputStream(), parameters, versionMajor,
|
||||
versionComment);
|
||||
BasicContentInfo contentInfo = new ContentInfoImpl(content.getMimetype(), content.getEncoding(), -1, null);
|
||||
return updateExistingFile(parentNodeRef, existingFile, fileName, contentInfo, content.getInputStream(), parameters, versionMajor, versionComment);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -3130,9 +3088,7 @@ public class NodesImpl implements Nodes
|
||||
versionMajor = versioningEnabled ? versionMajor : null;
|
||||
|
||||
// Create a new file.
|
||||
NodeRef nodeRef = createNewFile(parentNodeRef, fileName, nodeTypeQName, content,
|
||||
storageClasses, properties, assocTypeQName, parameters,
|
||||
versionMajor, versionComment);
|
||||
NodeRef nodeRef = createNewFile(parentNodeRef, fileName, nodeTypeQName, content, properties, assocTypeQName, parameters, versionMajor, versionComment);
|
||||
|
||||
// Create the response
|
||||
final Node fileNode = getFolderOrDocumentFullInfo(nodeRef, parentNodeRef, nodeTypeQName, parameters);
|
||||
@@ -3149,10 +3105,6 @@ public class NodesImpl implements Nodes
|
||||
{
|
||||
throw new PermissionDeniedException(ade.getMessage());
|
||||
}
|
||||
catch (UnsupportedStorageClassException usce)
|
||||
{
|
||||
throw new InvalidArgumentException(usce.getMessage());
|
||||
}
|
||||
|
||||
/*
|
||||
* NOTE: Do not clean formData temp files to allow for retries. It's
|
||||
@@ -3161,9 +3113,8 @@ public class NodesImpl implements Nodes
|
||||
*/
|
||||
}
|
||||
|
||||
private NodeRef createNewFile(NodeRef parentNodeRef, String fileName, QName nodeType,
|
||||
Content content, StorageClassSet storageClassSet, Map<QName, Serializable> props,
|
||||
QName assocTypeQName, Parameters params, Boolean versionMajor, String versionComment)
|
||||
private NodeRef createNewFile(NodeRef parentNodeRef, String fileName, QName nodeType, Content content, Map<QName, Serializable> props, QName assocTypeQName, Parameters params,
|
||||
Boolean versionMajor, String versionComment)
|
||||
{
|
||||
NodeRef nodeRef = createNodeImpl(parentNodeRef, fileName, nodeType, props, assocTypeQName);
|
||||
|
||||
@@ -3175,7 +3126,7 @@ public class NodesImpl implements Nodes
|
||||
else
|
||||
{
|
||||
// Write content
|
||||
writeContent(nodeRef, fileName, content.getInputStream(), true, storageClassSet);
|
||||
writeContent(nodeRef, fileName, content.getInputStream(), true);
|
||||
}
|
||||
|
||||
if ((versionMajor != null) || (versionComment != null))
|
||||
@@ -3253,21 +3204,6 @@ public class NodesImpl implements Nodes
|
||||
return renditions;
|
||||
}
|
||||
|
||||
static StorageClassSet getRequestedStorageClasses(String storageClassesParam)
|
||||
{
|
||||
if (storageClassesParam == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
String[] storageClasses = Arrays.stream(storageClassesParam.split(","))
|
||||
.map(String::trim)
|
||||
.filter(sc -> !sc.isEmpty())
|
||||
.toArray(String[]::new);
|
||||
|
||||
return new StorageClassSet(storageClasses);
|
||||
}
|
||||
|
||||
private void requestRenditions(Set<String> renditionNames, Node fileNode)
|
||||
{
|
||||
if (renditionNames != null)
|
||||
@@ -3478,6 +3414,20 @@ public class NodesImpl implements Nodes
|
||||
return getFolderOrDocument(nodeId, parameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, boolean attachment, Long validFor)
|
||||
{
|
||||
DirectAccessUrl directAccessUrl = contentService.requestContentDirectUrl(nodeRef, attachment, validFor);
|
||||
if (directAccessUrl == null)
|
||||
{
|
||||
throw new DisabledServiceException("Direct access url isn't available.");
|
||||
}
|
||||
return directAccessUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if same permission is sent more than once
|
||||
* @param locallySetPermissions
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software LimitedP
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software LimitedP
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -26,6 +26,19 @@
|
||||
|
||||
package org.alfresco.rest.api.impl;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.alfresco.heartbeat.RenditionsDataCollector;
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.query.PagingResults;
|
||||
@@ -61,6 +74,7 @@ import org.alfresco.rest.workflow.api.impl.MapBasedQueryWalker;
|
||||
import org.alfresco.service.ServiceRegistry;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.MimetypeService;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
@@ -78,19 +92,6 @@ import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.context.ResourceLoaderAware;
|
||||
import org.springframework.core.io.ResourceLoader;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.TreeMap;
|
||||
|
||||
/**
|
||||
* @author Jamal Kaabi-Mofrad, janv
|
||||
*/
|
||||
@@ -483,6 +484,22 @@ public class RenditionsImpl implements Renditions, ResourceLoaderAware
|
||||
return getContentImpl(nodeRef, renditionId, parameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public DirectAccessUrl requestContentDirectUrl(NodeRef nodeRef, String versionId, String renditionId, boolean attachment, Long validFor)
|
||||
{
|
||||
final NodeRef validatedNodeRef = validateNode(nodeRef.getStoreRef(), nodeRef.getId(), versionId, null);
|
||||
NodeRef renditionNodeRef = getRenditionByName(validatedNodeRef, renditionId, null);
|
||||
|
||||
if (renditionNodeRef == null)
|
||||
{
|
||||
throw new NotFoundException("The rendition with id: " + renditionId + " was not found.");
|
||||
}
|
||||
|
||||
return nodes.requestContentDirectUrl(renditionNodeRef, attachment, validFor);
|
||||
}
|
||||
|
||||
private BinaryResource getContentImpl(NodeRef nodeRef, String renditionId, Parameters parameters)
|
||||
{
|
||||
NodeRef renditionNodeRef = getRenditionByName(nodeRef, renditionId, parameters);
|
||||
|
@@ -0,0 +1,94 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.rest.api.impl.directurl;
|
||||
|
||||
import org.alfresco.repo.content.directurl.AbstractDirectUrlConfig;
|
||||
import org.alfresco.repo.content.directurl.InvalidDirectAccessUrlConfigException;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* REST API direct access URL configuration settings.
|
||||
*
|
||||
* @author Sara Aspery
|
||||
*/
|
||||
public class RestApiDirectUrlConfig extends AbstractDirectUrlConfig
|
||||
{
|
||||
private static final Log logger = LogFactory.getLog(RestApiDirectUrlConfig.class);
|
||||
|
||||
/**
|
||||
* Configuration initialise
|
||||
*/
|
||||
public void init()
|
||||
{
|
||||
validate();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void validate()
|
||||
{
|
||||
// Disable direct access URLs for the REST API if any error found in the REST API direct access URL config
|
||||
try
|
||||
{
|
||||
validateDirectAccessUrlConfig();
|
||||
}
|
||||
catch (InvalidDirectAccessUrlConfigException ex)
|
||||
{
|
||||
logger.error("Disabling REST API direct access URLs due to configuration error: " + ex.getMessage());
|
||||
setEnabled(false);
|
||||
}
|
||||
logger.info("REST API direct access URLs are " + (isEnabled() ? "enabled" : "disabled"));
|
||||
}
|
||||
|
||||
/* Helper method to validate the REST API direct access url configuration settings */
|
||||
private void validateDirectAccessUrlConfig() throws InvalidDirectAccessUrlConfigException
|
||||
{
|
||||
if (isEnabled())
|
||||
{
|
||||
if (getDefaultExpiryTimeInSec() == null)
|
||||
{
|
||||
logger.warn(String.format("Default expiry time property is missing: setting to system-wide default [%s].", getSysWideDefaultExpiryTimeInSec()));
|
||||
setDefaultExpiryTimeInSec(getSysWideDefaultExpiryTimeInSec());
|
||||
}
|
||||
|
||||
if (getDefaultExpiryTimeInSec() < 1)
|
||||
{
|
||||
String errorMsg = String.format("REST API direct access URL default expiry time [%s] is invalid.", getDefaultExpiryTimeInSec());
|
||||
throw new InvalidDirectAccessUrlConfigException(errorMsg);
|
||||
}
|
||||
|
||||
if (getDefaultExpiryTimeInSec() > getSysWideMaxExpiryTimeInSec())
|
||||
{
|
||||
String errorMsg = String.format("REST API direct access URL default expiry time [%s] exceeds system-wide maximum expiry time [%s].",
|
||||
getDefaultExpiryTimeInSec(), getSysWideMaxExpiryTimeInSec());
|
||||
throw new InvalidDirectAccessUrlConfigException(errorMsg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -25,10 +25,6 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.content.StorageClassSet;
|
||||
|
||||
/**
|
||||
* Representation of content info
|
||||
*
|
||||
@@ -41,7 +37,6 @@ public class ContentInfo
|
||||
private String mimeTypeName;
|
||||
private Long sizeInBytes;
|
||||
private String encoding;
|
||||
private StorageClassSet storageClassSet;
|
||||
|
||||
public ContentInfo()
|
||||
{
|
||||
@@ -55,15 +50,6 @@ public class ContentInfo
|
||||
this.encoding = encoding;
|
||||
}
|
||||
|
||||
public ContentInfo(String mimeType, String mimeTypeName, Long sizeInBytes, String encoding, StorageClassSet storageClassSet)
|
||||
{
|
||||
this.mimeType = mimeType;
|
||||
this.mimeTypeName = mimeTypeName;
|
||||
this.sizeInBytes = sizeInBytes;
|
||||
this.encoding = encoding;
|
||||
this.storageClassSet = storageClassSet;
|
||||
}
|
||||
|
||||
public String getMimeType() {
|
||||
return mimeType;
|
||||
}
|
||||
@@ -84,21 +70,10 @@ public class ContentInfo
|
||||
return encoding;
|
||||
}
|
||||
|
||||
public StorageClassSet getStorageClasses()
|
||||
{
|
||||
return storageClassSet;
|
||||
}
|
||||
|
||||
public void setStorageClasses(StorageClassSet storageClassSet)
|
||||
{
|
||||
this.storageClassSet = storageClassSet;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
return "ContentInfo [mimeType=" + mimeType + ", mimeTypeName=" + mimeTypeName
|
||||
+ ", encoding=" + encoding + ", sizeInBytes=" + sizeInBytes + ", storageClasses=" + storageClassSet
|
||||
+ "]";
|
||||
+ ", encoding=" + encoding + ", sizeInBytes=" + sizeInBytes + "]";
|
||||
}
|
||||
}
|
||||
|
@@ -26,28 +26,21 @@
|
||||
package org.alfresco.rest.api.model;
|
||||
|
||||
/**
|
||||
* Represents a storage class.
|
||||
* Direct Access URL request.
|
||||
*
|
||||
* @author Sara Aspery
|
||||
*/
|
||||
public class StorageClass
|
||||
public class DirectAccessUrlRequest
|
||||
{
|
||||
private String id;
|
||||
private Boolean attachment;
|
||||
|
||||
public StorageClass(String id)
|
||||
public Boolean isAttachment()
|
||||
{
|
||||
this.id = id;
|
||||
return attachment;
|
||||
}
|
||||
|
||||
public StorageClass()
|
||||
public void setAttachment(Boolean attachment)
|
||||
{
|
||||
}
|
||||
|
||||
public void setId(String id)
|
||||
{
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getId()
|
||||
{
|
||||
return id;
|
||||
this.attachment = attachment;
|
||||
}
|
||||
}
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -416,6 +416,7 @@ public class RepositoryInfo
|
||||
private boolean isAuditEnabled;
|
||||
private boolean isQuickShareEnabled;
|
||||
private boolean isThumbnailGenerationEnabled;
|
||||
private boolean isDirectAccessUrlEnabled;
|
||||
|
||||
public StatusInfo()
|
||||
{
|
||||
@@ -465,6 +466,17 @@ public class RepositoryInfo
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean getIsDirectAccessUrlEnabled()
|
||||
{
|
||||
return isDirectAccessUrlEnabled;
|
||||
}
|
||||
|
||||
public StatusInfo setDirectAccessUrlEnabled(boolean isDirectAccessUrlEnabled)
|
||||
{
|
||||
this.isDirectAccessUrlEnabled = isDirectAccessUrlEnabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
@@ -473,6 +485,7 @@ public class RepositoryInfo
|
||||
.append(", isAuditEnabled=").append(isAuditEnabled)
|
||||
.append(", isQuickShareEnabled=").append(isQuickShareEnabled)
|
||||
.append(", isThumbnailGenerationEnabled=").append(isThumbnailGenerationEnabled)
|
||||
.append(", isDirectAccessUrlEnabled=").append(isDirectAccessUrlEnabled)
|
||||
.append(']');
|
||||
return sb.toString();
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -26,16 +26,27 @@
|
||||
|
||||
package org.alfresco.rest.api.nodes;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.alfresco.repo.content.directurl.DirectAccessUrlDisabledException;
|
||||
import org.alfresco.rest.api.DirectAccessUrlHelper;
|
||||
import org.alfresco.rest.api.Renditions;
|
||||
import org.alfresco.rest.api.model.DirectAccessUrlRequest;
|
||||
import org.alfresco.rest.api.model.Rendition;
|
||||
import org.alfresco.rest.framework.BinaryProperties;
|
||||
import org.alfresco.rest.framework.Operation;
|
||||
import org.alfresco.rest.framework.WebApiDescription;
|
||||
import org.alfresco.rest.framework.WebApiParam;
|
||||
import org.alfresco.rest.framework.core.ResourceParameter;
|
||||
import org.alfresco.rest.framework.core.exceptions.DisabledServiceException;
|
||||
import org.alfresco.rest.framework.resource.RelationshipResource;
|
||||
import org.alfresco.rest.framework.resource.actions.interfaces.RelationshipResourceAction;
|
||||
import org.alfresco.rest.framework.resource.actions.interfaces.RelationshipResourceBinaryAction;
|
||||
import org.alfresco.rest.framework.resource.content.BinaryResource;
|
||||
import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
|
||||
import org.alfresco.rest.framework.resource.parameters.Parameters;
|
||||
import org.alfresco.rest.framework.webscripts.WithResponse;
|
||||
import org.alfresco.service.cmr.repository.DirectAccessUrl;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
@@ -58,12 +69,18 @@ public class NodeRenditionsRelation implements RelationshipResourceAction.Read<R
|
||||
{
|
||||
|
||||
private Renditions renditions;
|
||||
private DirectAccessUrlHelper directAccessUrlHelper;
|
||||
|
||||
public void setRenditions(Renditions renditions)
|
||||
{
|
||||
this.renditions = renditions;
|
||||
}
|
||||
|
||||
public void setDirectAccessUrlHelper(DirectAccessUrlHelper directAccessUrlHelper)
|
||||
{
|
||||
this.directAccessUrlHelper = directAccessUrlHelper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() throws Exception
|
||||
{
|
||||
@@ -102,4 +119,26 @@ public class NodeRenditionsRelation implements RelationshipResourceAction.Read<R
|
||||
return renditions.getContent(nodeRef, renditionId, parameters);
|
||||
}
|
||||
|
||||
@Operation("request-direct-access-url")
|
||||
@WebApiParam (name = "directAccessUrlRequest", title = "Request direct access url", description = "Options for direct access url request", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)
|
||||
@WebApiDescription(title = "Request content url",
|
||||
description="Generates a direct access URL.",
|
||||
successStatus = HttpServletResponse.SC_OK)
|
||||
public DirectAccessUrl requestContentDirectUrl(String nodeId, String renditionId, DirectAccessUrlRequest directAccessUrlRequest, Parameters parameters, WithResponse withResponse)
|
||||
{
|
||||
boolean attachment = directAccessUrlHelper.getAttachment(directAccessUrlRequest);
|
||||
Long validFor = directAccessUrlHelper.getDefaultExpiryTimeInSec();
|
||||
NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, nodeId);
|
||||
|
||||
DirectAccessUrl directAccessUrl;
|
||||
try
|
||||
{
|
||||
directAccessUrl = renditions.requestContentDirectUrl(nodeRef, null, renditionId, attachment, validFor);
|
||||
}
|
||||
catch (DirectAccessUrlDisabledException ex)
|
||||
{
|
||||
throw new DisabledServiceException(ex.getMessage());
|
||||
}
|
||||
return directAccessUrl;
|
||||
}
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Remote API
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -26,6 +26,8 @@
|
||||
|
||||
package org.alfresco.rest.api.nodes;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.rest.api.Renditions;
|
||||
import org.alfresco.rest.api.model.Rendition;
|
||||
import org.alfresco.rest.framework.BinaryProperties;
|
||||
@@ -42,8 +44,6 @@ import org.alfresco.util.PropertyCheck;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.extensions.webscripts.Status;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* Node version renditions
|
||||
|
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Remote API
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -26,16 +26,22 @@
 package org.alfresco.rest.api.nodes;

 import org.alfresco.model.ContentModel;
 import org.alfresco.repo.content.directurl.DirectAccessUrlDisabledException;
 import org.alfresco.repo.node.integrity.IntegrityException;
 import org.alfresco.repo.policy.BehaviourFilter;
 import org.alfresco.repo.version.Version2Model;
 import org.alfresco.repo.version.VersionModel;
 import org.alfresco.rest.api.DirectAccessUrlHelper;
 import org.alfresco.rest.api.model.DirectAccessUrlRequest;
 import org.alfresco.rest.api.model.Node;
 import org.alfresco.rest.api.model.UserInfo;
 import org.alfresco.rest.api.model.VersionOptions;
 import org.alfresco.rest.framework.BinaryProperties;
 import org.alfresco.rest.framework.Operation;
 import org.alfresco.rest.framework.WebApiDescription;
 import org.alfresco.rest.framework.WebApiParam;
 import org.alfresco.rest.framework.core.ResourceParameter;
 import org.alfresco.rest.framework.core.exceptions.DisabledServiceException;
 import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
 import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
 import org.alfresco.rest.framework.resource.RelationshipResource;
@@ -46,6 +52,7 @@ import org.alfresco.rest.framework.resource.parameters.CollectionWithPagingInfo;
 import org.alfresco.rest.framework.resource.parameters.Parameters;
 import org.alfresco.rest.framework.webscripts.WithResponse;
 import org.alfresco.service.cmr.coci.CheckOutCheckInService;
 import org.alfresco.service.cmr.repository.DirectAccessUrl;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.security.AccessStatus;
 import org.alfresco.service.cmr.security.PermissionService;
@@ -80,6 +87,12 @@ public class NodeVersionsRelation extends AbstractNodeRelation implements
 {
     protected VersionService versionService;
     protected BehaviourFilter behaviourFilter;
     private DirectAccessUrlHelper directAccessUrlHelper;

     public void setDirectAccessUrlHelper(DirectAccessUrlHelper directAccessUrlHelper)
     {
         this.directAccessUrlHelper = directAccessUrlHelper;
     }

     @Override
     public void afterPropertiesSet()
@@ -288,4 +301,32 @@ public class NodeVersionsRelation extends AbstractNodeRelation implements
         }
         return null;
     }

     @Operation("request-direct-access-url")
     @WebApiParam (name = "directAccessUrlRequest", title = "Request direct access url", description = "Options for direct access url request", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)
     @WebApiDescription(title = "Request content url",
             description="Generates a direct access URL.",
             successStatus = HttpServletResponse.SC_OK)
     public DirectAccessUrl requestContentDirectUrl(String nodeId, String versionId, DirectAccessUrlRequest directAccessUrlRequest, Parameters parameters, WithResponse withResponse)
     {
         boolean attachment = directAccessUrlHelper.getAttachment(directAccessUrlRequest);
         Long validFor = directAccessUrlHelper.getDefaultExpiryTimeInSec();
         Version version = findVersion(nodeId, versionId);
         if (version != null)
         {
             NodeRef versionNodeRef = version.getFrozenStateNodeRef();

             DirectAccessUrl directAccessUrl;
             try
             {
                 directAccessUrl = nodes.requestContentDirectUrl(versionNodeRef, attachment, validFor);
             }
             catch (DirectAccessUrlDisabledException ex)
             {
                 throw new DisabledServiceException(ex.getMessage());
             }
             return directAccessUrl;
         }
         throw new EntityNotFoundException(nodeId+"-"+versionId);
     }
 }

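The requestContentDirectUrl operation added above is exposed through the REST framework via its @Operation("request-direct-access-url") binding. The following sketch is illustrative only and not part of this changeset: it assumes the conventional v1 public API binding POST /nodes/{nodeId}/versions/{versionId}/request-direct-access-url under /alfresco/api/-default-/public/alfresco/versions/1, and the host, credentials, node id, version label and the "attachment" JSON field name are placeholders or assumptions rather than values taken from the diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Sketch: ask the repository for a direct access URL to a specific version's content.
public class RequestVersionDirectUrl
{
    public static void main(String[] args) throws Exception
    {
        // Placeholders - point these at a real repository, node and version label.
        String base = "http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1";
        String nodeId = "your-node-uuid";
        String versionId = "1.0";

        // Assumed binding for @Operation("request-direct-access-url") on the versions relation.
        String url = base + "/nodes/" + nodeId + "/versions/" + versionId + "/request-direct-access-url";

        // The operation only reads the attachment flag from the request body; the expiry
        // is taken from the server-side default (directAccessUrlHelper.getDefaultExpiryTimeInSec()).
        String body = "{\"attachment\": true}";

        String auth = Base64.getEncoder()
                .encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));

        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("Authorization", "Basic " + auth)
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // A 200 carries the generated URL; an unknown node or version surfaces as 404
        // (EntityNotFoundException), and a disabled direct-access-URL service as an error
        // response via DisabledServiceException.
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
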
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Remote API
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -25,11 +25,13 @@
 */
 package org.alfresco.rest.api.nodes;

 import javax.servlet.http.HttpServletResponse;
 import java.io.InputStream;

 import javax.servlet.http.HttpServletResponse;

 import org.alfresco.repo.content.directurl.DirectAccessUrlDisabledException;
 import org.alfresco.rest.api.DirectAccessUrlHelper;
 import org.alfresco.rest.api.Nodes;
 import org.alfresco.rest.api.model.DirectAccessUrlRequest;
 import org.alfresco.rest.api.model.LockInfo;
 import org.alfresco.rest.api.model.Node;
 import org.alfresco.rest.api.model.NodeTarget;
@@ -37,6 +39,8 @@ import org.alfresco.rest.framework.BinaryProperties;
 import org.alfresco.rest.framework.Operation;
 import org.alfresco.rest.framework.WebApiDescription;
 import org.alfresco.rest.framework.WebApiParam;
 import org.alfresco.rest.framework.core.ResourceParameter;
 import org.alfresco.rest.framework.core.exceptions.DisabledServiceException;
 import org.alfresco.rest.framework.core.exceptions.EntityNotFoundException;
 import org.alfresco.rest.framework.resource.EntityResource;
 import org.alfresco.rest.framework.resource.actions.interfaces.BinaryResourceAction;
@@ -45,7 +49,10 @@ import org.alfresco.rest.framework.resource.content.BasicContentInfo;
 import org.alfresco.rest.framework.resource.content.BinaryResource;
 import org.alfresco.rest.framework.resource.parameters.Parameters;
 import org.alfresco.rest.framework.webscripts.WithResponse;
 import org.alfresco.service.cmr.repository.DirectAccessUrl;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.util.ParameterCheck;

 import org.springframework.beans.factory.InitializingBean;

 /**
@@ -61,13 +68,19 @@ public class NodesEntityResource implements
         BinaryResourceAction.Read, BinaryResourceAction.Update<Node>, InitializingBean
 {
     private Nodes nodes;
     private DirectAccessUrlHelper directAccessUrlHelper;

     public void setNodes(Nodes nodes)
     {
         this.nodes = nodes;
     }

     public void setDirectAccessUrlHelper(DirectAccessUrlHelper directAccessUrlHelper)
     {
         this.directAccessUrlHelper = directAccessUrlHelper;
     }

     @Override
     public void afterPropertiesSet()
     {
         ParameterCheck.mandatory("nodes", this.nodes);
@@ -189,5 +202,27 @@ public class NodesEntityResource implements
         return nodes.unlock(nodeId, parameters);
     }

     @Operation("request-direct-access-url")
     @WebApiParam(name = "directAccessUrlRequest", title = "Request direct access url", description = "Options for direct access url request", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)
     @WebApiDescription(title = "Request content url",
             description="Generates a direct access URL.",
             successStatus = HttpServletResponse.SC_OK)
     public DirectAccessUrl requestContentDirectUrl(String nodeId, DirectAccessUrlRequest directAccessUrlRequest, Parameters parameters, WithResponse withResponse)
     {
         boolean attachment = directAccessUrlHelper.getAttachment(directAccessUrlRequest);
         Long validFor = directAccessUrlHelper.getDefaultExpiryTimeInSec();
         NodeRef nodeRef = nodes.validateNode(nodeId);

         DirectAccessUrl directAccessUrl;
         try
         {
             directAccessUrl = nodes.requestContentDirectUrl(nodeRef, attachment, validFor);
         }
         catch (DirectAccessUrlDisabledException ex)
         {
             throw new DisabledServiceException(ex.getMessage());
         }
         return directAccessUrl;
     }
 }

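The entity-level counterpart added to NodesEntityResource serves direct access URLs for a node's current content. The sketch below is again illustrative and not part of this changeset: it assumes the binding POST /nodes/{nodeId}/request-direct-access-url on the same v1 public API, with a placeholder host, credentials and node id.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Sketch: request a direct access URL for the current content of a node.
public class RequestNodeDirectUrl
{
    public static void main(String[] args) throws Exception
    {
        // Placeholders - replace with a reachable repository and a real node id.
        String base = "http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1";
        String nodeId = "your-node-uuid";

        // Assumed binding for @Operation("request-direct-access-url") on the node entity.
        String url = base + "/nodes/" + nodeId + "/request-direct-access-url";

        // Only the attachment flag is read from the body by the new operation.
        String body = "{\"attachment\": false}";

        String auth = Base64.getEncoder()
                .encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));

        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("Authorization", "Basic " + auth)
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        if (response.statusCode() == 200)
        {
            // The body carries the generated pre-signed URL and its expiry.
            System.out.println(response.body());
        }
        else
        {
            // Direct access URLs disabled (DisabledServiceException path) or node not found.
            System.err.println("Request failed: HTTP " + response.statusCode());
        }
    }
}

If direct access URLs are not enabled for the repository and its content store, the request takes the DirectAccessUrlDisabledException/DisabledServiceException path shown in the diff instead of returning a URL.
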
Some files were not shown because too many files have changed in this diff.