mirror of
https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-09-24 14:32:01 +00:00
Compare commits
115 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
43799408a8 | ||
|
e7305006f0 | ||
|
40c30411af | ||
|
91f8b43237 | ||
|
6fccf828e1 | ||
|
3fac3373c9 | ||
|
ee857ce1de | ||
|
483d7fab21 | ||
|
590209b299 | ||
|
376514df67 | ||
|
7144a2dd94 | ||
|
b4da3d8c20 | ||
|
62de9ff0c0 | ||
|
a11acce720 | ||
|
1128011e15 | ||
|
d0cb45de0d | ||
|
2b48195896 | ||
|
fbb95d6a7f | ||
|
502427e852 | ||
|
3ff2d79641 | ||
|
f274b88ece | ||
|
21550ec30b | ||
|
8665267225 | ||
|
984b0bc719 | ||
|
5b89fc0be7 | ||
|
bf3a3382fd | ||
|
14d007fae8 | ||
|
79317ddc9d | ||
|
c0e762fe5e | ||
|
5109b99520 | ||
|
dfc6306331 | ||
|
731f98921f | ||
|
0b21dbdc0a | ||
|
dd928356b8 | ||
|
1844d8bdb9 | ||
|
17eef66f5c | ||
|
1d1f269a70 | ||
|
2ccf6044b8 | ||
|
bdd09784e1 | ||
|
de5d70be46 | ||
|
8cacba0988 | ||
|
60187bf9a2 | ||
|
ff4634be19 | ||
|
9c64b45908 | ||
|
d97d8fba04 | ||
|
368b571d9c | ||
|
e9da7d222b | ||
|
8c059460f9 | ||
|
bd0aaa08b3 | ||
|
93d678dc30 | ||
|
9648189827 | ||
|
9bfd274127 | ||
|
dcf9f65f6b | ||
|
784fae5834 | ||
|
eddd8a1065 | ||
|
0941746518 | ||
|
a1f0f35f60 | ||
|
25d96a50cd | ||
|
3c7f024fed | ||
|
6a1a197701 | ||
|
5cff5092a0 | ||
|
b50f912df2 | ||
|
b4f00dddb8 | ||
|
02c103f39a | ||
|
3c23fa20c5 | ||
|
576b6faac9 | ||
|
d86415401d | ||
|
460cc1f2cd | ||
|
370fef10fd | ||
|
efadc239d4 | ||
|
de90e37578 | ||
|
6e438d2e4f | ||
|
a86fa21880 | ||
|
db74a6e7f2 | ||
|
8c773ac97c | ||
|
5a2b3cf64d | ||
|
8ab910d2b1 | ||
|
fa70f1cd45 | ||
|
75a2e0f901 | ||
|
84997bcf86 | ||
|
536b3ddd6d | ||
|
bb86c97b11 | ||
|
558f117f24 | ||
|
bbae71658d | ||
|
9ee9653463 | ||
|
44570cec8a | ||
|
33eb0354fa | ||
|
0d3e2dc8bb | ||
|
ac62c52a33 | ||
|
0d30d40d8f | ||
|
87e365df7e | ||
|
6ee01e808b | ||
|
57427f4765 | ||
|
f6a12760c9 | ||
|
560a050af3 | ||
|
5ed82930d2 | ||
|
f8a32022c3 | ||
|
137df0ff4c | ||
|
23bd8c064c | ||
|
e3384eaee4 | ||
|
c7e5716a4a | ||
|
7fa0d30100 | ||
|
6d33e57d05 | ||
|
f0bfc647e4 | ||
|
757dbbbb1f | ||
|
8c555b62d4 | ||
|
d0138c9702 | ||
|
71849cd4ac | ||
|
dba27bb86c | ||
|
6a37bc93c0 | ||
|
686ffcb19c | ||
|
b36a1a9364 | ||
|
a12a31120d | ||
|
bf2e53344d | ||
|
aa4570c895 |
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -145,7 +145,7 @@ jobs:
|
||||
- uses: Alfresco/alfresco-build-tools/.github/actions/get-build-info@v8.16.0
|
||||
- uses: Alfresco/alfresco-build-tools/.github/actions/free-hosted-runner-disk-space@v8.16.0
|
||||
- uses: Alfresco/alfresco-build-tools/.github/actions/setup-java-build@v8.16.0
|
||||
- uses: Alfresco/ya-pmd-scan@v4.1.0
|
||||
- uses: Alfresco/ya-pmd-scan@v4.3.0
|
||||
with:
|
||||
classpath-build-command: "mvn test-compile -ntp -Pags -pl \"-:alfresco-community-repo-docker\""
|
||||
|
||||
|
@@ -1273,7 +1273,7 @@
|
||||
"filename": "repository/src/main/resources/alfresco/repository.properties",
|
||||
"hashed_secret": "84551ae5442affc9f1a2d3b4c86ae8b24860149d",
|
||||
"is_verified": false,
|
||||
"line_number": 770,
|
||||
"line_number": 771,
|
||||
"is_secret": false
|
||||
}
|
||||
],
|
||||
@@ -1539,7 +1539,7 @@
|
||||
"filename": "repository/src/test/java/org/alfresco/repo/rendition2/AbstractRenditionIntegrationTest.java",
|
||||
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
|
||||
"is_verified": false,
|
||||
"line_number": 127,
|
||||
"line_number": 130,
|
||||
"is_secret": false
|
||||
}
|
||||
],
|
||||
@@ -1627,7 +1627,7 @@
|
||||
"filename": "repository/src/test/java/org/alfresco/repo/security/authentication/identityservice/SpringBasedIdentityServiceFacadeUnitTest.java",
|
||||
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
|
||||
"is_verified": false,
|
||||
"line_number": 46,
|
||||
"line_number": 48,
|
||||
"is_secret": false
|
||||
}
|
||||
],
|
||||
@@ -1888,5 +1888,5 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"generated_at": "2024-10-09T09:32:52Z"
|
||||
}
|
||||
"generated_at": "2025-05-15T21:47:13Z"
|
||||
}
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-amps</artifactId>
|
||||
<version>23.5.0.6</version>
|
||||
<version>23.6.0.18</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-parent</artifactId>
|
||||
<version>23.5.0.6</version>
|
||||
<version>23.6.0.18</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
|
||||
<version>23.5.0.6</version>
|
||||
<version>23.6.0.18</version>
|
||||
</parent>
|
||||
|
||||
<build>
|
||||
|
@@ -45,7 +45,7 @@ import com.github.dockerjava.netty.NettyDockerCmdExecFactory;
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import org.alfresco.utility.Utility;
|
||||
import org.apache.commons.lang.SystemUtils;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@@ -26,13 +26,6 @@
|
||||
*/
|
||||
package org.alfresco.rest.rm.community.hold;
|
||||
|
||||
import static org.alfresco.rest.rm.community.base.TestData.HOLD_DESCRIPTION;
|
||||
import static org.alfresco.rest.rm.community.base.TestData.HOLD_REASON;
|
||||
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.FILE_PLAN_ALIAS;
|
||||
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_FILING;
|
||||
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_READ_RECORDS;
|
||||
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
|
||||
import static org.alfresco.utility.report.log.Step.STEP;
|
||||
import static org.awaitility.Awaitility.await;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
@@ -44,12 +37,25 @@ import static org.springframework.http.HttpStatus.NOT_FOUND;
|
||||
import static org.springframework.http.HttpStatus.OK;
|
||||
import static org.springframework.http.HttpStatus.UNAUTHORIZED;
|
||||
|
||||
import static org.alfresco.rest.rm.community.base.TestData.HOLD_DESCRIPTION;
|
||||
import static org.alfresco.rest.rm.community.base.TestData.HOLD_REASON;
|
||||
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentAlias.FILE_PLAN_ALIAS;
|
||||
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_FILING;
|
||||
import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSION_READ_RECORDS;
|
||||
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
|
||||
import static org.alfresco.utility.report.log.Step.STEP;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.testng.annotations.AfterClass;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import org.alfresco.dataprep.CMISUtil;
|
||||
import org.alfresco.dataprep.ContentActions;
|
||||
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
|
||||
@@ -71,10 +77,6 @@ import org.alfresco.utility.constants.UserRole;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.testng.annotations.AfterClass;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/**
|
||||
* API tests for adding items to holds via the bulk process
|
||||
@@ -82,7 +84,7 @@ import org.testng.annotations.Test;
|
||||
public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
{
|
||||
private static final String ACCESS_DENIED_ERROR_MESSAGE = "Access Denied. You do not have the appropriate " +
|
||||
"permissions to perform this operation.";
|
||||
"permissions to perform this operation.";
|
||||
private static final int NUMBER_OF_FILES = 5;
|
||||
private final List<FileModel> addedFiles = new ArrayList<>();
|
||||
private final List<UserModel> users = new ArrayList<>();
|
||||
@@ -102,8 +104,9 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
{
|
||||
STEP("Create a hold.");
|
||||
hold = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(),
|
||||
FILE_PLAN_ALIAS);
|
||||
holds.add(hold);
|
||||
|
||||
STEP("Create test files.");
|
||||
@@ -117,8 +120,8 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
for (int i = 0; i < NUMBER_OF_FILES; i++)
|
||||
{
|
||||
FileModel documentHeld = dataContent.usingAdmin()
|
||||
.usingResource(i % 2 == 0 ? folder1 : folder2)
|
||||
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
|
||||
.usingResource(i % 2 == 0 ? folder1 : folder2)
|
||||
.createContent(CMISUtil.DocumentType.TEXT_PLAIN);
|
||||
addedFiles.add(documentHeld);
|
||||
}
|
||||
|
||||
@@ -128,29 +131,37 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
|
||||
STEP("Wait until all files are searchable.");
|
||||
await().atMost(30, TimeUnit.SECONDS)
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(null).search(searchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(null).search(searchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
|
||||
RestRequestQueryModel ancestorReq = getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion());
|
||||
SearchRequest ancestorSearchRequest = new SearchRequest();
|
||||
ancestorSearchRequest.setQuery(ancestorReq);
|
||||
|
||||
STEP("Wait until paths are indexed.");
|
||||
// to improve stability on CI - seems that sometimes during big load we need to wait longer for the condition
|
||||
await().atMost(120, TimeUnit.SECONDS)
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(null).search(ancestorSearchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
|
||||
holdBulkOperation = HoldBulkOperation.builder()
|
||||
.query(queryReq)
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
.query(queryReq)
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then the content is added to the hold and the status of the bulk operation is DONE
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API Then the content is added to the hold and the status of the bulk operation is DONE
|
||||
*/
|
||||
@Test
|
||||
public void addContentFromTestSiteToHoldUsingBulkAPI()
|
||||
{
|
||||
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userAddHoldPermission);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(ACCEPTED);
|
||||
@@ -158,50 +169,49 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
|
||||
STEP("Wait until all files are added to the hold.");
|
||||
await().atMost(20, TimeUnit.SECONDS).until(
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold.getId()).getEntries().size()
|
||||
== NUMBER_OF_FILES);
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold.getId()).getEntries().size() == NUMBER_OF_FILES);
|
||||
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getChildren(hold.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId).toList();
|
||||
.getChildren(hold.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId)
|
||||
.toList();
|
||||
assertEquals(addedFiles.stream().map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
|
||||
STEP("Check the bulk status.");
|
||||
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 0, null, holdBulkOperation);
|
||||
|
||||
STEP("Check the bulk statuses.");
|
||||
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatuses(hold.getId());
|
||||
.getBulkStatuses(hold.getId());
|
||||
assertEquals(Arrays.asList(holdBulkStatus),
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a folder and all subfolders to a hold using the bulk API
|
||||
* Then the content is added to the hold and the status of the bulk operation is DONE
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a folder and all subfolders to a hold using the bulk API Then the content is added to the hold and the status of the bulk operation is DONE
|
||||
*/
|
||||
@Test
|
||||
public void addContentFromFolderAndAllSubfoldersToHoldUsingBulkAPI()
|
||||
{
|
||||
hold3 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(),
|
||||
FILE_PLAN_ALIAS);
|
||||
holds.add(hold3);
|
||||
|
||||
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold3.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator, hold3.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userAddHoldPermission);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
// Get content from folder and all subfolders of the root folder
|
||||
HoldBulkOperation bulkOperation = HoldBulkOperation.builder()
|
||||
.query(getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion()))
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
.query(getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion()))
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.startBulkProcess(bulkOperation, hold3.getId());
|
||||
.startBulkProcess(bulkOperation, hold3.getId());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(ACCEPTED);
|
||||
@@ -209,43 +219,40 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
|
||||
STEP("Wait until all files are added to the hold.");
|
||||
await().atMost(20, TimeUnit.SECONDS).until(
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold3.getId()).getEntries().size()
|
||||
== NUMBER_OF_FILES);
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold3.getId()).getEntries().size() == NUMBER_OF_FILES);
|
||||
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getChildren(hold3.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId).toList();
|
||||
.getChildren(hold3.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId)
|
||||
.toList();
|
||||
assertEquals(addedFiles.stream().map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
|
||||
STEP("Check the bulk status.");
|
||||
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatus(hold3.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
.getBulkStatus(hold3.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 0, null, bulkOperation);
|
||||
|
||||
STEP("Check the bulk statuses.");
|
||||
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatuses(hold3.getId());
|
||||
.getBulkStatuses(hold3.getId());
|
||||
assertEquals(List.of(holdBulkStatus),
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user without the add to hold capability
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then the user receives access denied error
|
||||
* Given a user without the add to hold capability When the user adds content from a site to a hold using the bulk API Then the user receives access denied error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessWithUserWithoutAddToHoldCapability()
|
||||
{
|
||||
UserModel userWithoutAddToHoldCapability = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole
|
||||
.SiteCollaborator,
|
||||
hold.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator,
|
||||
hold.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
|
||||
users.add(userWithoutAddToHoldCapability);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
getRestAPIFactory().getHoldsAPI(userWithoutAddToHoldCapability)
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
|
||||
STEP("Verify the response status code and the error message.");
|
||||
assertStatusCode(FORBIDDEN);
|
||||
@@ -253,21 +260,19 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user without the filing permission on a hold
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then the user receives access denied error
|
||||
* Given a user without the filing permission on a hold When the user adds content from a site to a hold using the bulk API Then the user receives access denied error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessWithUserWithoutFilingPermissionOnAHold()
|
||||
{
|
||||
// User without filing permission on a hold
|
||||
UserModel userWithoutPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_READ_RECORDS);
|
||||
UserRole.SiteCollaborator, hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_READ_RECORDS);
|
||||
users.add(userWithoutPermission);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
getRestAPIFactory().getHoldsAPI(userWithoutPermission)
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
|
||||
STEP("Verify the response status code and the error message.");
|
||||
assertStatusCode(FORBIDDEN);
|
||||
@@ -276,68 +281,63 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user without the write permission on all the content
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then all processed items are marked as errors and the last error message contains access denied error
|
||||
* Given a user without the write permission on all the content When the user adds content from a site to a hold using the bulk API Then all processed items are marked as errors and the last error message contains access denied error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessWithUserWithoutWritePermissionOnTheContent()
|
||||
{
|
||||
// User without write permission on the content
|
||||
UserModel userWithoutPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(
|
||||
testSite, UserRole.SiteConsumer,
|
||||
hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
testSite, UserRole.SiteConsumer,
|
||||
hold.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userWithoutPermission);
|
||||
|
||||
// Wait until permissions are reverted
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setQuery(holdBulkOperation.getQuery());
|
||||
await().atMost(30, TimeUnit.SECONDS)
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(userWithoutPermission).search(searchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(userWithoutPermission).search(searchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(
|
||||
userWithoutPermission).startBulkProcess(holdBulkOperation, hold.getId());
|
||||
userWithoutPermission).startBulkProcess(holdBulkOperation, hold.getId());
|
||||
|
||||
STEP("Verify the response.");
|
||||
assertStatusCode(ACCEPTED);
|
||||
|
||||
await().atMost(20, TimeUnit.SECONDS).until(() ->
|
||||
Objects.equals(getRestAPIFactory().getHoldsAPI(userWithoutPermission)
|
||||
await().atMost(20, TimeUnit.SECONDS).until(() -> Objects.equals(getRestAPIFactory().getHoldsAPI(userWithoutPermission)
|
||||
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId()).getStatus(), "DONE"));
|
||||
|
||||
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userWithoutPermission)
|
||||
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
.getBulkStatus(hold.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, NUMBER_OF_FILES, ACCESS_DENIED_ERROR_MESSAGE,
|
||||
holdBulkOperation);
|
||||
holdBulkOperation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user without the write permission on one file
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then all processed items are added to the hold except the one that the user does not have write permission
|
||||
* And the status of the bulk operation is DONE, contains the error message and the number of errors is 1
|
||||
* Given a user without the write permission on one file When the user adds content from a site to a hold using the bulk API Then all processed items are added to the hold except the one that the user does not have write permission And the status of the bulk operation is DONE, contains the error message and the number of errors is 1
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessWithUserWithoutWritePermissionOnOneFile()
|
||||
{
|
||||
hold2 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(),
|
||||
FILE_PLAN_ALIAS);
|
||||
holds.add(hold2);
|
||||
|
||||
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold2.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator, hold2.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userAddHoldPermission);
|
||||
|
||||
contentActions.setPermissionForUser(getAdminUser().getUsername(), getAdminUser().getPassword(),
|
||||
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
|
||||
UserRole.SiteConsumer.getRoleId(), false);
|
||||
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
|
||||
UserRole.SiteConsumer.getRoleId(), false);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.startBulkProcess(holdBulkOperation, hold2.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold2.getId());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(ACCEPTED);
|
||||
@@ -345,56 +345,50 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
|
||||
STEP("Wait until all files are added to the hold.");
|
||||
await().atMost(30, TimeUnit.SECONDS).until(
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold2.getId()).getEntries().size()
|
||||
== NUMBER_OF_FILES - 1);
|
||||
() -> getRestAPIFactory().getHoldsAPI(getAdminUser()).getChildren(hold2.getId()).getEntries().size() == NUMBER_OF_FILES - 1);
|
||||
await().atMost(30, TimeUnit.SECONDS).until(
|
||||
() -> getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId()).getProcessedItems()
|
||||
== NUMBER_OF_FILES);
|
||||
() -> getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId()).getProcessedItems() == NUMBER_OF_FILES);
|
||||
List<String> holdChildrenNodeRefs = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getChildren(hold2.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId).toList();
|
||||
.getChildren(hold2.getId()).getEntries().stream().map(HoldChildEntry::getEntry).map(
|
||||
HoldChild::getId)
|
||||
.toList();
|
||||
assertEquals(addedFiles.stream().skip(1).map(FileModel::getNodeRefWithoutVersion).sorted().toList(),
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
holdChildrenNodeRefs.stream().sorted().toList());
|
||||
|
||||
STEP("Check the bulk status.");
|
||||
HoldBulkStatus holdBulkStatus = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
.getBulkStatus(hold2.getId(), bulkOperationEntry.getBulkStatusId());
|
||||
assertBulkProcessStatus(holdBulkStatus, NUMBER_OF_FILES, 1, ACCESS_DENIED_ERROR_MESSAGE, holdBulkOperation);
|
||||
|
||||
STEP("Check the bulk statuses.");
|
||||
HoldBulkStatusCollection holdBulkStatusCollection = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.getBulkStatuses(hold2.getId());
|
||||
.getBulkStatuses(hold2.getId());
|
||||
assertEquals(List.of(holdBulkStatus),
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
holdBulkStatusCollection.getEntries().stream().map(HoldBulkStatusEntry::getEntry).toList());
|
||||
|
||||
// Revert the permissions
|
||||
contentActions.setPermissionForUser(getAdminUser().getUsername(), getAdminUser().getPassword(),
|
||||
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
|
||||
UserRole.SiteCollaborator.getRoleId(), true);
|
||||
testSite.getId(), addedFiles.get(0).getName(), userAddHoldPermission.getUsername(),
|
||||
UserRole.SiteCollaborator.getRoleId(), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an unauthenticated user
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* Then the user receives unauthorized error
|
||||
* Given an unauthenticated user When the user adds content from a site to a hold using the bulk API Then the user receives unauthorized error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessAsUnauthenticatedUser()
|
||||
{
|
||||
STEP("Start bulk process as unauthenticated user");
|
||||
getRestAPIFactory().getHoldsAPI(new UserModel(getAdminUser().getUsername(), "wrongPassword"))
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold.getId());
|
||||
|
||||
STEP("Verify the response status code.");
|
||||
assertStatusCode(UNAUTHORIZED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And the hold does not exist
|
||||
* Then the user receives not found error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessForNonExistentHold()
|
||||
@@ -407,10 +401,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* and the bulk operation is invalid
|
||||
* Then the user receives bad request error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API and the bulk operation is invalid Then the user receives bad request error
|
||||
*/
|
||||
@Test
|
||||
public void testGetBulkStatusesForInvalidOperation()
|
||||
@@ -418,7 +409,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
STEP("Start bulk process for non existent hold");
|
||||
|
||||
HoldBulkOperation invalidHoldBulkOperation = HoldBulkOperation.builder().op(null)
|
||||
.query(holdBulkOperation.getQuery()).build();
|
||||
.query(holdBulkOperation.getQuery()).build();
|
||||
getRestAPIFactory().getHoldsAPI(getAdminUser()).startBulkProcess(invalidHoldBulkOperation, hold.getId());
|
||||
|
||||
STEP("Verify the response status code.");
|
||||
@@ -426,10 +417,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And the hold does not exist
|
||||
* Then the user receives not found error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
|
||||
*/
|
||||
@Test
|
||||
public void testGetBulkStatusForNonExistentHold()
|
||||
@@ -442,10 +430,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And the bulk status does not exist
|
||||
* Then the user receives not found error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the bulk status does not exist Then the user receives not found error
|
||||
*/
|
||||
@Test
|
||||
public void testGetBulkStatusForNonExistentBulkStatus()
|
||||
@@ -458,10 +443,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And the hold does not exist
|
||||
* Then the user receives not found error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And the hold does not exist Then the user receives not found error
|
||||
*/
|
||||
@Test
|
||||
public void testGetBulkStatusesForNonExistentHold()
|
||||
@@ -474,9 +456,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from all sites to a hold using the bulk API to exceed the limit (30 items)
|
||||
* Then the user receives bad request error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from all sites to a hold using the bulk API to exceed the limit (30 items) Then the user receives bad request error
|
||||
*/
|
||||
@Test
|
||||
public void testExceedingBulkOperationLimit()
|
||||
@@ -486,8 +466,8 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
queryReq.setLanguage("afts");
|
||||
|
||||
HoldBulkOperation exceedLimitOp = HoldBulkOperation.builder()
|
||||
.query(queryReq)
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
.query(queryReq)
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
|
||||
STEP("Start bulk process to exceed the limit");
|
||||
getRestAPIFactory().getHoldsAPI(getAdminUser()).startBulkProcess(exceedLimitOp, hold.getId());
|
||||
@@ -497,26 +477,24 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And then the user cancels the bulk operation
|
||||
* Then the user receives OK status code
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And then the user cancels the bulk operation Then the user receives OK status code
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessCancellationWithAllowedUser()
|
||||
{
|
||||
Hold hold4 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(),
|
||||
FILE_PLAN_ALIAS);
|
||||
holds.add(hold4);
|
||||
|
||||
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold4.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator, hold4.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userAddHoldPermission);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.startBulkProcess(holdBulkOperation, hold4.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold4.getId());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(ACCEPTED);
|
||||
@@ -524,47 +502,44 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
|
||||
STEP("Cancel the bulk operation.");
|
||||
getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.cancelBulkOperation(hold4.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
|
||||
.cancelBulkOperation(hold4.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(OK);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a user with the add to hold capability and hold filing permission
|
||||
* When the user adds content from a site to a hold using the bulk API
|
||||
* And a 2nd user without the add to hold capability cancels the bulk operation
|
||||
* Then the 2nd user receives access denied error
|
||||
* Given a user with the add to hold capability and hold filing permission When the user adds content from a site to a hold using the bulk API And a 2nd user without the add to hold capability cancels the bulk operation Then the 2nd user receives access denied error
|
||||
*/
|
||||
@Test
|
||||
public void testBulkProcessCancellationWithUserWithoutAddToHoldCapability()
|
||||
{
|
||||
Hold hold5 = getRestAPIFactory().getFilePlansAPI(getAdminUser()).createHold(
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(), FILE_PLAN_ALIAS);
|
||||
Hold.builder().name("HOLD" + generateTestPrefix(AddToHoldsV1Tests.class)).description(HOLD_DESCRIPTION)
|
||||
.reason(HOLD_REASON).build(),
|
||||
FILE_PLAN_ALIAS);
|
||||
holds.add(hold5);
|
||||
|
||||
UserModel userAddHoldPermission = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole.SiteCollaborator, hold5.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator, hold5.getId(), UserRoles.ROLE_RM_MANAGER, PERMISSION_FILING);
|
||||
users.add(userAddHoldPermission);
|
||||
|
||||
STEP("Add content from the site to the hold using the bulk API.");
|
||||
HoldBulkOperationEntry bulkOperationEntry = getRestAPIFactory().getHoldsAPI(userAddHoldPermission)
|
||||
.startBulkProcess(holdBulkOperation, hold5.getId());
|
||||
.startBulkProcess(holdBulkOperation, hold5.getId());
|
||||
|
||||
// Verify the status code
|
||||
assertStatusCode(ACCEPTED);
|
||||
assertEquals(NUMBER_OF_FILES, bulkOperationEntry.getTotalItems());
|
||||
|
||||
UserModel userWithoutAddToHoldCapability = roleService.createUserWithSiteRoleRMRoleAndPermission(testSite,
|
||||
UserRole
|
||||
.SiteCollaborator,
|
||||
hold5.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
|
||||
UserRole.SiteCollaborator,
|
||||
hold5.getId(), UserRoles.ROLE_RM_POWER_USER, PERMISSION_FILING);
|
||||
users.add(userWithoutAddToHoldCapability);
|
||||
|
||||
STEP("Cancel the bulk operation.");
|
||||
getRestAPIFactory().getHoldsAPI(userWithoutAddToHoldCapability)
|
||||
.cancelBulkOperation(hold5.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
|
||||
.cancelBulkOperation(hold5.getId(), bulkOperationEntry.getBulkStatusId(), new BulkBodyCancel());
|
||||
|
||||
STEP("Verify the response status code and the error message.");
|
||||
assertStatusCode(FORBIDDEN);
|
||||
@@ -572,7 +547,7 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
}
|
||||
|
||||
private void assertBulkProcessStatus(HoldBulkStatus holdBulkStatus, long expectedProcessedItems,
|
||||
int expectedErrorsCount, String expectedErrorMessage, HoldBulkOperation holdBulkOperation)
|
||||
int expectedErrorsCount, String expectedErrorMessage, HoldBulkOperation holdBulkOperation)
|
||||
{
|
||||
assertEquals("DONE", holdBulkStatus.getStatus());
|
||||
assertEquals(expectedProcessedItems, holdBulkStatus.getTotalItems());
|
||||
|
@@ -42,7 +42,7 @@ import org.alfresco.rest.v0.RMRolesAndActionsAPI;
|
||||
import org.alfresco.rest.v0.RecordsAPI;
|
||||
import org.alfresco.rest.v0.RecordCategoriesAPI;
|
||||
import org.alfresco.test.AlfrescoTest;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.json.JSONObject;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.testng.annotations.Test;
|
||||
|
@@ -44,7 +44,7 @@ import org.alfresco.rest.v0.service.DispositionScheduleService;
|
||||
import org.alfresco.test.AlfrescoTest;
|
||||
import org.alfresco.utility.model.RepoTestModel;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
|
@@ -23,7 +23,7 @@ Recorded content can be explicitly destroyed whilst maintaining the original nod
|
||||
* License: Alfresco Community
|
||||
* Issue Tracker Link: [JIRA RM](https://issues.alfresco.com/jira/projects/RM/summary)
|
||||
* Contribution Model: Alfresco Closed Source
|
||||
* Documentation: [docs.alfresco.com (Records Management)](http://docs.alfresco.com/rm2.4/concepts/welcome-rm.html)
|
||||
* Documentation: [docs.alfresco.com (Records Management)](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services-Community-Edition/23.4/Alfresco-Governance-Services-Community-Edition/Introduction)
|
||||
|
||||
***
|
||||
|
||||
|
@@ -21,18 +21,18 @@ RM is split into two main parts - a repository integration and a Share integrati
|
||||
* [Community License](../LICENSE.txt)
|
||||
* [Enterprise License](../../rm-enterprise/LICENSE.txt) (this file will only be present in clones of the Enterprise repository)
|
||||
* [Issue Tracker Link](https://issues.alfresco.com/jira/projects/RM)
|
||||
* [Community Documentation Link](http://docs.alfresco.com/rm-community/concepts/welcome-rm.html)
|
||||
* [Enterprise Documentation Link](http://docs.alfresco.com/rm/concepts/welcome-rm.html)
|
||||
* [Community Documentation Link](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services-Community-Edition/23.4/Alfresco-Governance-Services-Community-Edition/Introduction)
|
||||
* [Enterprise Documentation Link](https://support.hyland.com/r/Alfresco/Alfresco-Governance-Services/23.4/Alfresco-Governance-Services/Introduction)
|
||||
* [Contribution Model](../../CONTRIBUTING.md)
|
||||
|
||||
***
|
||||
|
||||
### Prerequisite Knowledge
|
||||
An understanding of Alfresco Content Services is assumed. The following pages from the [developer documentation](http://docs.alfresco.com/5.2/concepts/dev-for-developers.html) give useful background information:
|
||||
An understanding of Alfresco Content Services is assumed. The following pages from the [developer documentation](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services-Community-Edition/23.4/Alfresco-Content-Services-Community-Edition/Develop) give useful background information:
|
||||
|
||||
* [ACS Architecture](http://docs.alfresco.com/5.2/concepts/dev-arch-overview.html)
|
||||
* [Platform Extensions](http://docs.alfresco.com/5.2/concepts/dev-platform-extensions.html)
|
||||
* [Share Extensions](http://docs.alfresco.com/5.2/concepts/dev-extensions-share.html)
|
||||
* [ACS Architecture](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Software-Architecture)
|
||||
* [Platform Extensions](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Extension-Points-Overview)
|
||||
* [Share Extensions](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Share-UI-Extension-Points)
|
||||
|
||||
***
|
||||
|
||||
@@ -44,12 +44,12 @@ The RM Share module communicates with the repository module via REST APIs. Inter
|
||||
* A DAO layer responsible for CRUD operations against the database.
|
||||
|
||||
#### REST API
|
||||
The REST API endpoints fall into two main types - v0 (Webscripts) and v1. The [v0 API](http://docs.alfresco.com/5.2/references/dev-extension-points-webscripts.html) is older and not recommended for integrations. The [v1 API](http://docs.alfresco.com/5.1/pra/1/topics/pra-welcome-aara.html) is newer but isn't yet feature complete. If you are running RM locally then the GS API Explorer will be available at [this link](http://localhost:8080/gs-api-explorer/).
|
||||
The REST API endpoints fall into two main types - v0 (Webscripts) and v1. The [v0 API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Web-Scripts) is older and not recommended for integrations. The [v1 API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide) is newer but isn't yet feature complete. If you are running RM locally then the GS API Explorer will be available at [this link](http://localhost:8080/gs-api-explorer/).
|
||||
|
||||
Internally the GS v1 REST API is built on the [Alfresco v1 REST API framework](https://community.alfresco.com/community/ecm/blog/2016/10/11/v1-rest-api-part-1-introduction). It aims to be consistent with this in terms of behaviour and naming.
|
||||
|
||||
#### Java Public API
|
||||
The Java service layer is fronted by a [Java Public API](http://docs.alfresco.com/5.2/concepts/java-public-api-list.html), which we will ensure backward compatible with previous releases. Before we remove any methods there will first be a release containing that method deprecated to allow third party integrations to migrate to a new method. The Java Public API also includes a set of POJO objects which are needed to communicate with the services. It is easy to identify classes that are part of the Java Public API as they are annotated `@AlfrescoPublicApi`.
|
||||
The Java service layer is fronted by a [Java Public API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Reference/Java-Foundation-API), which we will ensure backward compatible with previous releases. Before we remove any methods there will first be a release containing that method deprecated to allow third party integrations to migrate to a new method. The Java Public API also includes a set of POJO objects which are needed to communicate with the services. It is easy to identify classes that are part of the Java Public API as they are annotated `@AlfrescoPublicApi`.
|
||||
|
||||
Each Java service will have at least four beans defined for it:
|
||||
|
||||
@@ -61,7 +61,7 @@ Each Java service will have at least four beans defined for it:
|
||||
#### DAOs
|
||||
The DAOs are not part of the Java Public API, but handle CRUD operations against RM stored data. We have some custom queries to improve performance for particularly heavy operations.
|
||||
|
||||
We use standard Alfresco [data modelling](http://docs.alfresco.com/5.2/references/dev-extension-points-content-model.html) to store RM metadata. We extend the [Alfresco patching mechanism](http://docs.alfresco.com/5.2/references/dev-extension-points-patch.html) to provide community and enterprise schema upgrades.
|
||||
We use standard Alfresco [data modelling](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Content-Model-Extension-Point) to store RM metadata. We extend the [Alfresco patching mechanism](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/In-Process-Platform-Extension-Points/Patches) to provide community and enterprise schema upgrades.
|
||||
|
||||
***
|
||||
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-parent</artifactId>
|
||||
<version>23.5.0.6</version>
|
||||
<version>23.6.0.18</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -119,6 +119,11 @@ rm.patch.v35.holdNewChildAssocPatch.batchSize=1000
|
||||
rm.haspermissionmap.read=Read
|
||||
rm.haspermissionmap.write=WriteProperties,AddChildren,ReadContent
|
||||
|
||||
# Extended Permissions
|
||||
# Enable matching the given username with the correct casing username when retrieving an IPR group.
|
||||
# Only needs to be used if there are owners that don't have the username in the correct casing.
|
||||
rm.extendedSecurity.enableUsernameNormalization=false
|
||||
|
||||
#
|
||||
# Extended auto-version behaviour. If true and other auto-version properties are satisfied, then
|
||||
# a document will be auto-versioned when its type is changed.
|
||||
|
@@ -34,4 +34,7 @@
|
||||
<!-- content cleanser -->
|
||||
<bean id="contentCleanser.522022M" class="org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanser522022M"/>
|
||||
|
||||
<!-- content cleanser -->
|
||||
<bean id="contentCleanser.SevenPass" class="org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanserSevenPass"/>
|
||||
|
||||
</beans>
|
||||
|
@@ -611,6 +611,7 @@
|
||||
<property name="authorityService" ref="authorityService"/>
|
||||
<property name="permissionService" ref="permissionService"/>
|
||||
<property name="transactionService" ref="transactionService"/>
|
||||
<property name="enableUsernameNormalization" value="${rm.extendedSecurity.enableUsernameNormalization}" />
|
||||
</bean>
|
||||
|
||||
<bean id="ExtendedSecurityService" class="org.springframework.aop.framework.ProxyFactoryBean">
|
||||
|
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
|
||||
<version>23.5.0.6</version>
|
||||
<version>23.6.0.18</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -0,0 +1,51 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Records Management Module
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* -
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* -
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* -
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* -
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.module.org_alfresco_module_rm.content.cleanser;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
/**
|
||||
* DoD 5220-22M Seven Pass data cleansing implementation.
|
||||
*
|
||||
*/
|
||||
public class ContentCleanserSevenPass extends ContentCleanser522022M
|
||||
{
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.content.cleanser.ContentCleanser#cleanse(java.io.File)
|
||||
*/
|
||||
@Override
|
||||
public void cleanse(File file)
|
||||
{
|
||||
super.cleanse(file);
|
||||
overwrite(file, overwriteZeros);
|
||||
overwrite(file, overwriteZeros);
|
||||
overwrite(file, overwriteOnes);
|
||||
overwrite(file, overwriteRandom);
|
||||
|
||||
}
|
||||
}
|
@@ -34,6 +34,12 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
@@ -42,7 +48,10 @@ import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.security.authority.RMAuthority;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -54,12 +63,6 @@ import org.alfresco.service.namespace.RegexQNamePattern;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.ParameterCheck;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
|
||||
/**
|
||||
* Extended security service implementation.
|
||||
@@ -68,9 +71,9 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
{
|
||||
/** ipr group names */
|
||||
static final String ROOT_IPR_GROUP = "INPLACE_RECORD_MANAGEMENT";
|
||||
@@ -95,8 +98,11 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/** transaction service */
|
||||
private TransactionService transactionService;
|
||||
|
||||
private boolean enableUsernameNormalization;
|
||||
|
||||
/**
|
||||
* @param filePlanService file plan service
|
||||
* @param filePlanService
|
||||
* file plan service
|
||||
*/
|
||||
public void setFilePlanService(FilePlanService filePlanService)
|
||||
{
|
||||
@@ -104,7 +110,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filePlanRoleService file plan role service
|
||||
* @param filePlanRoleService
|
||||
* file plan role service
|
||||
*/
|
||||
public void setFilePlanRoleService(FilePlanRoleService filePlanRoleService)
|
||||
{
|
||||
@@ -112,7 +119,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param authorityService authority service
|
||||
* @param authorityService
|
||||
* authority service
|
||||
*/
|
||||
public void setAuthorityService(AuthorityService authorityService)
|
||||
{
|
||||
@@ -120,7 +128,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param permissionService permission service
|
||||
* @param permissionService
|
||||
* permission service
|
||||
*/
|
||||
public void setPermissionService(PermissionService permissionService)
|
||||
{
|
||||
@@ -128,13 +137,23 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param transactionService transaction service
|
||||
* @param transactionService
|
||||
* transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param enableUsernameNormalization
|
||||
* enable username normalization to ensure correct casing
|
||||
*/
|
||||
public void setEnableUsernameNormalization(boolean enableUsernameNormalization)
|
||||
{
|
||||
this.enableUsernameNormalization = enableUsernameNormalization;
|
||||
}
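/*
 * Sketch only: how the new enableUsernameNormalization flag is expected to be switched
 * on when this service is wired up. The surrounding bean definition lives in the module's
 * Spring context, which is not part of this diff, so the collaborator calls shown here are
 * assumptions; the only method taken from the change itself is setEnableUsernameNormalization(boolean).
 */
ExtendedSecurityServiceImpl service = new ExtendedSecurityServiceImpl();
// ... other collaborators (authorityService, permissionService, transactionService) injected as before ...
service.setEnableUsernameNormalization(true); // defaults to false; enable only when usernames may have been stored with mixed casing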
|
||||
|
||||
/**
|
||||
* Application context refresh event handler
|
||||
*/
|
||||
@@ -142,19 +161,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent)
|
||||
{
|
||||
// run as System on bootstrap
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>()
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>() {
|
||||
public Object doWork()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
// if the root group doesn't exist then create it
|
||||
if (!authorityService.authorityExists(getRootIRPGroup()))
|
||||
{
|
||||
authorityService.createAuthority(AuthorityType.GROUP, ROOT_IPR_GROUP, ROOT_IPR_GROUP,
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -174,7 +191,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
return GROUP_PREFIX + ROOT_IPR_GROUP;
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService#hasExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override
|
||||
@@ -224,8 +241,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Helper to get authorities for a given group
|
||||
*
|
||||
* @param group group name
|
||||
* @return Set<String> immediate authorities
|
||||
* @param group
|
||||
* group name
|
||||
* @return Set<String> immediate authorities
|
||||
*/
|
||||
private Set<String> getAuthorities(String group)
|
||||
{
|
||||
@@ -284,8 +302,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Return null if none found.
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
*/
|
||||
private Pair<String, String> getIPRGroups(NodeRef nodeRef)
|
||||
{
|
||||
@@ -321,17 +340,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
    /**
     * Given a set of readers and writers find or create the appropriate IPR groups.
     * <p>
     * The IPR groups are named with hashes of the authority lists in order to reduce
     * the set of groups that require exact match. A further index is used to handle
     * a situation where there is a hash clash, but a difference in the authority lists.
     * The IPR groups are named with hashes of the authority lists in order to reduce the set of groups that require exact match. A further index is used to handle a situation where there is a hash clash, but a difference in the authority lists.
     * <p>
     * When no match is found the groups are created. Once created
     * When no match is found the groups are created. Once created
     *
     * @param filePlan file plan
     * @param readers authorities with read
     * @param writers authorities with write
     * @return Pair<String, String> where first is the full name of the read group and
     *         second is the full name of the write group
     * @param filePlan
     *            file plan
     * @param readers
     *            authorities with read
     * @param writers
     *            authorities with write
     * @return Pair<String, String> where first is the full name of the read group and second is the full name of the write group
     */
    private Pair<String, String> createOrFindIPRGroups(Set<String> readers, Set<String> writers)
    {
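/*
 * Illustrative sketch (not part of the change set) of the naming scheme the javadoc above
 * describes: an IPR group short name is <prefix><hash-of-authority-set><index>, and the
 * trailing index is only incremented when two different authority sets happen to hash to
 * the same value. The prefix literal and the hashing detail here are assumptions for the
 * example; the real values come from getIPRGroupShortName and getAuthoritySetHashCode below.
 */
import java.util.Set;

class IprGroupNamingSketch
{
    static String shortName(String prefix, Set<String> authorities, int index)
    {
        // Set.hashCode() is order independent, so equal authority sets hash equally
        return prefix + authorities.hashCode() + index;
    }

    public static void main(String[] args)
    {
        Set<String> readers = Set.of("usera", "userb");
        // the first group for this hash gets index 0; a clashing-but-different set would be tried as 1, 2, ...
        System.out.println(shortName("IPR", readers, 0));
    }
}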
@@ -343,20 +362,28 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
    /**
     * Create or find an IPR group based on the provided prefix and authorities.
     *
     * @param groupPrefix group prefix
     * @param authorities authorities
     * @return String full group name
     * @param groupPrefix
     *            group prefix
     * @param authorities
     *            authorities
     * @return String full group name
     */
    private String createOrFindIPRGroup(String groupPrefix, Set<String> authorities)
    {
        String group = null;

        // If enabled, the authorities are forced to match the correct casing of the usernames in case they were set
        // with the incorrect casing.
        // If not, it will just use the authorities as they are.
        // In normal circumstances, the authorities are in the correct casing, so this is disabled by default.
        Set<String> authoritySet = normalizeAuthorities(authorities);

        // find group or determine what the next index is if no group exists or there is a clash
        Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authorities);
        Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authoritySet);

        if (groupResult.getFirst() == null)
        {
            group = createIPRGroup(groupPrefix, authorities, groupResult.getSecond());
            group = createIPRGroup(groupPrefix, authoritySet, groupResult.getSecond());
        }
        else
        {
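/*
 * Minimal sketch (types simplified) of the find-or-create contract used above: findIPRGroup
 * answers either the matching group, or, when nothing matches, the index at which a new
 * group can be created without colliding with existing clash entries. org.alfresco.util.Pair
 * is the same helper already imported in this class.
 */
import org.alfresco.util.Pair;

class FindOrCreateSketch
{
    static String findOrCreate(Pair<String, Integer> lookupResult)
    {
        if (lookupResult.getFirst() == null)
        {
            // nothing matched: create the group using the next free clash index
            return "create at index " + lookupResult.getSecond();
        }
        // exact match found: reuse it
        return lookupResult.getFirst();
    }

    public static void main(String[] args)
    {
        System.out.println(findOrCreate(new Pair<>(null, 2)));
        System.out.println(findOrCreate(new Pair<>("GROUP_IPR_example", 3)));
    }
}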
@@ -369,13 +396,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Given a group name prefix and the authorities, finds the exact match existing group.
|
||||
* <p>
|
||||
* If the group does not exist then the group returned is null and the index shows the next available
|
||||
* group index for creation.
|
||||
* If the group does not exist then the group returned is null and the index shows the next available group index for creation.
|
||||
*
|
||||
* @param groupPrefix group name prefix
|
||||
* @param authorities authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second
|
||||
* if the next available create index
|
||||
* @param groupPrefix
|
||||
* group name prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second if the next available create index
|
||||
*/
|
||||
private Pair<String, Integer> findIPRGroup(String groupPrefix, Set<String> authorities)
|
||||
{
|
||||
@@ -391,12 +418,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
while (hasMoreItems == true)
|
||||
{
|
||||
// get matching authorities
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS*pageCount, MAX_ITEMS));
|
||||
PagingResults<String> results = authorityService.getAuthorities(
|
||||
AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS * pageCount, MAX_ITEMS));
|
||||
|
||||
// record the total count
|
||||
nextGroupIndex = nextGroupIndex + results.getPage().size();
|
||||
@@ -413,29 +441,88 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
|
||||
// determine if there are any more pages to inspect
|
||||
hasMoreItems = results.hasMoreItems();
|
||||
pageCount ++;
|
||||
pageCount++;
|
||||
}
|
||||
|
||||
return new Pair<>(iprGroup, nextGroupIndex);
|
||||
}
|
||||
|
||||
    /**
     * Given a set of authorities, normalizes the authority names to ensure correct casing.
     *
     * @param authNames
     *            authority names to normalize
     * @return Set<String> normalized authority names
     */
    private Set<String> normalizeAuthorities(Set<String> authNames)
    {
        // If disabled or no authorities, return as is
        if (!enableUsernameNormalization || authNames == null || authNames.isEmpty())
        {
            return authNames;
        }

        Set<String> normalizedAuthorities = new HashSet<>();
        for (String authorityName : authNames)
        {
            normalizedAuthorities.add(normalizeAuthorityName(authorityName));
        }
        return normalizedAuthorities;
    }

    /**
     * Usernames are case insensitive but affect the IPR group matching when set with different casing. For a given authority of type user, this method normalizes the authority name. If group, it returns the name as-is.
     *
     * @param authorityName
     *            the authority name to normalize
     * @return the normalized authority name
     */
    private String normalizeAuthorityName(String authorityName)
    {
        if (authorityName == null || authorityName.startsWith(GROUP_PREFIX))
        {
            return authorityName;
        }

        // For users, attempt to get the correct casing from the username property of the user node
        if (authorityService.authorityExists(authorityName))
        {
            try
            {
                NodeRef authorityNodeRef = authorityService.getAuthorityNodeRef(authorityName);
                if (authorityNodeRef != null)
                {
                    String username = (String) nodeService.getProperty(authorityNodeRef, ContentModel.PROP_USERNAME);
                    return username != null ? username : authorityName;
                }
            }
            catch (Exception e)
            {
                // If anything goes wrong, fallback to the original name
            }
        }

        return authorityName;
    }
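/*
 * Illustrative sketch of the effect of the normalization above: when the flag is on, a
 * mixed-case authority name is replaced by the canonical username stored on the person
 * node, so two requests that differ only in casing end up hashing to the same IPR group
 * name. The map below is a stand-in for the authorityService/nodeService lookup; only
 * the normalization idea itself is taken from the code above.
 */
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

class NormalizationSketch
{
    // stand-in for the person directory: lower-cased name -> canonical username
    static final Map<String, String> CANONICAL = Map.of("user", "User", "user_w", "User_w");

    static Set<String> normalize(Set<String> authorities)
    {
        return authorities.stream()
                .map(name -> name.startsWith("GROUP_") ? name : CANONICAL.getOrDefault(name.toLowerCase(), name))
                .collect(Collectors.toSet());
    }

    public static void main(String[] args)
    {
        // "UseR" and "user" normalize to the same canonical name, so they produce the same group hash
        System.out.println(normalize(Set.of("UseR", "GROUP_site_x")).hashCode() == normalize(Set.of("user", "GROUP_site_x")).hashCode());
    }
}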
/**
|
||||
* Determines whether a group exactly matches a list of authorities.
|
||||
*
|
||||
* @param authorities list of authorities
|
||||
* @param group group
|
||||
* @param authorities
|
||||
* list of authorities
|
||||
* @param group
|
||||
* group
|
||||
* @return
|
||||
*/
|
||||
private boolean isIPRGroupTrueMatch(String group, Set<String> authorities)
|
||||
{
|
||||
//Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
// Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
Set<String> plainAuthorities = new HashSet<String>();
|
||||
if (authorities != null)
|
||||
{
|
||||
plainAuthorities.addAll(authorities);
|
||||
plainAuthorities.remove(PermissionService.ALL_AUTHORITIES);
|
||||
}
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
return contained.equals(plainAuthorities);
|
||||
}
|
||||
|
||||
@@ -444,15 +531,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param authorities authorities
|
||||
* @return String group prefix short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return String group prefix short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
/* package */ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -464,13 +553,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
/* package */ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
{
|
||||
return getIPRGroupShortName(prefix, authorities, Integer.toString(index));
|
||||
}
|
||||
@@ -480,17 +573,21 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Note this excludes the "GROUP_" prefix.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
private String getIPRGroupShortName(String prefix, Set<String> authorities, String index)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -498,8 +595,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Gets the hashcode value of a set of authorities.
|
||||
*
|
||||
* @param authorities set of authorities
|
||||
* @return int hash code
|
||||
* @param authorities
|
||||
* set of authorities
|
||||
* @return int hash code
|
||||
*/
|
||||
private int getAuthoritySetHashCode(Set<String> authorities)
|
||||
{
|
||||
@@ -514,10 +612,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Creates a new IPR group.
|
||||
*
|
||||
* @param groupNamePrefix group name prefix
|
||||
* @param children child authorities
|
||||
* @param index group index
|
||||
* @return String full name of created group
|
||||
* @param groupNamePrefix
|
||||
* group name prefix
|
||||
* @param children
|
||||
* child authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String full name of created group
|
||||
*/
|
||||
private String createIPRGroup(String groupNamePrefix, Set<String> children, int index)
|
||||
{
|
||||
@@ -547,10 +648,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
                }
            }
        }
        catch(DuplicateChildNodeNameException ex)
        catch (DuplicateChildNodeNameException ex)
        {
            // the group was concurrently created
            group = authorityService.getName(AuthorityType.GROUP, groupShortName);
            // Rethrow as ConcurrencyFailureException so that it can be retried and linked to the group created by the concurrent transaction
            throw new ConcurrencyFailureException("IPR group creation failed due to concurrent duplicate group name creation: " + groupShortName);
        }

        return group;
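/*
 * Sketch of the retry behaviour the comment above relies on: the set(...) call is made
 * inside a RetryingTransactionCallback so that a ConcurrencyFailureException raised by a
 * concurrent duplicate group creation simply re-runs the transaction, which then finds the
 * group created by the other transaction. The variable names (retryingTransactionHelper,
 * extendedSecurityService, nodeRef, readers, writers) are assumptions standing in for the
 * caller's context; RetryingTransactionHelper itself retries on ConcurrencyFailureException.
 */
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
{
    @Override
    public Void execute() throws Throwable
    {
        // re-executed from the top if the IPR group name collides with a concurrent creation
        extendedSecurityService.set(nodeRef, readers, writers);
        return null;
    }
}, false, true);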
@@ -559,8 +660,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Assign IPR groups to a node reference with the correct permissions.
|
||||
*
|
||||
* @param iprGroups iprGroups, first read and second write
|
||||
* @param nodeRef node reference
|
||||
* @param iprGroups
|
||||
* iprGroups, first read and second write
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void assignIPRGroupsToNode(Pair<String, String> iprGroups, NodeRef nodeRef)
|
||||
{
|
||||
@@ -598,7 +701,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Clear the nodes IPR permissions
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void clearPermissions(NodeRef nodeRef, Pair<String, String> iprGroups)
|
||||
{
|
||||
@@ -610,7 +714,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedReaders(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
{
|
||||
return getReaders(nodeRef);
|
||||
}
|
||||
@@ -618,7 +724,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedWriters(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
{
|
||||
return getWriters(nodeRef);
|
||||
}
|
||||
@@ -626,7 +734,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -634,7 +744,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -642,7 +754,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -650,7 +764,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -658,7 +774,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String>writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -666,7 +784,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
|
@@ -29,14 +29,23 @@ package org.alfresco.module.org_alfresco_module_rm.test.legacy.service;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.site.SiteModel;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.security.AccessPermission;
|
||||
import org.alfresco.service.cmr.security.AccessStatus;
|
||||
import org.alfresco.service.cmr.security.AuthorityType;
|
||||
import org.alfresco.service.cmr.site.SiteService;
|
||||
import org.alfresco.service.cmr.site.SiteVisibility;
|
||||
import org.alfresco.util.GUID;
|
||||
@@ -73,8 +82,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
private String createTestUser()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>() {
|
||||
public String run()
|
||||
{
|
||||
String userName = GUID.generate();
|
||||
@@ -90,8 +98,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String elephant = createTestUser();
|
||||
final String snake = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run()
|
||||
{
|
||||
assertFalse(extendedSecurityService.hasExtendedSecurity(filePlan));
|
||||
@@ -118,7 +125,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
// test remove
|
||||
extendedSecurityService.remove(recordToo);
|
||||
|
||||
|
||||
assertFalse(extendedSecurityService.hasExtendedSecurity(recordToo));
|
||||
assertTrue(extendedSecurityService.getReaders(recordToo).isEmpty());
|
||||
assertTrue(extendedSecurityService.getWriters(recordToo).isEmpty());
|
||||
@@ -133,8 +140,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String monkey = createTestUser();
|
||||
final String elephant = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
|
||||
public Void run() throws Exception
|
||||
@@ -184,112 +190,337 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
public void testDifferentUsersDifferentPermissions()
|
||||
{
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT)
|
||||
.getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
}
|
||||
});
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, true);
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
verifyCreatedGroups(documents, false);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithoutRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name.
|
||||
// Since there is no retry, we expect to get a ConcurrencyFailureException
|
||||
Assert.assertThrows(ConcurrencyFailureException.class, () -> {
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, false);
|
||||
});
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
// Since there was a ConcurrencyFailureException some threads failed to set extended security so some
|
||||
// documents may not have IPR groups created.
|
||||
verifyCreatedGroups(documents, true);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
private Set<NodeRef> setupConcurrentTestCase(int concurrentThreads, Set<String> extendedReaders, Set<String> extendedWriters)
|
||||
{
|
||||
final String usera = createTestUser();
|
||||
final String userb = createTestUser();
|
||||
final String owner = createTestUser();
|
||||
|
||||
extendedReaders.add(usera);
|
||||
extendedReaders.add(userb);
|
||||
extendedWriters.add(usera);
|
||||
extendedWriters.add(userb);
|
||||
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
|
||||
// Create a site
|
||||
NodeRef documentLib = createSite(new HashSet<>(), new HashSet<>());
|
||||
|
||||
// Create records in the site document library
|
||||
return createRecords(concurrentThreads, documentLib, owner);
|
||||
}
|
||||
|
||||
private NodeRef createSite(Set<String> readers, Set<String> writers)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<NodeRef>() {
|
||||
@Override
|
||||
public NodeRef execute() throws Throwable
|
||||
{
|
||||
final String siteShortName = GUID.generate();
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
readers.forEach(reader -> siteService.setMembership(siteShortName, reader, SiteModel.SITE_CONSUMER));
|
||||
writers.forEach(writer -> siteService.setMembership(siteShortName, writer, SiteModel.SITE_COLLABORATOR));
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<NodeRef> createRecords(int numRecords, NodeRef parent, String owner)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Set<NodeRef>>() {
|
||||
@Override
|
||||
public Set<NodeRef> execute() throws Throwable
|
||||
{
|
||||
int createdRecords = 0;
|
||||
Set<NodeRef> documents = new HashSet<>();
|
||||
while (createdRecords < numRecords)
|
||||
{
|
||||
final NodeRef doc = fileFolderService.create(parent, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
ownableService.setOwner(doc, owner);
|
||||
recordService.createRecord(filePlan, doc, rmFolder, true);
|
||||
recordService.file(doc);
|
||||
recordService.complete(doc);
|
||||
documents.add(doc);
|
||||
createdRecords++;
|
||||
}
|
||||
return documents;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers, boolean useRetry)
|
||||
{
|
||||
if (!useRetry)
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return;
|
||||
}
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
extendedSecurityService.set(doc, readers, writers);
|
||||
}
|
||||
|
||||
private void fireParallelExecutionOfSetExtendedSecurity(Set<NodeRef> documents, Set<String> extendedReaders, Set<String> extendedWriters, boolean useRetry)
|
||||
{
|
||||
CompletableFuture<?>[] futures = documents.stream()
|
||||
.map(doc -> CompletableFuture.runAsync(() -> setExtendedSecurity(doc, extendedReaders, extendedWriters, useRetry)))
|
||||
.toArray(CompletableFuture[]::new);
|
||||
|
||||
try
|
||||
{
|
||||
CompletableFuture.allOf(futures).join();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Throwable cause = e.getCause();
|
||||
if (cause instanceof ConcurrencyFailureException)
|
||||
{
|
||||
throw (ConcurrencyFailureException) cause;
|
||||
}
|
||||
throw new RuntimeException("Error during parallel execution", e);
|
||||
}
|
||||
}
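/*
 * Side note on the parallel driver above: CompletableFuture.allOf(...).join() wraps the
 * first task failure in a CompletionException, which is why the catch block reads
 * getCause() before rethrowing. A minimal self-contained illustration (assumed values):
 */
java.util.concurrent.CompletableFuture<?> failing = java.util.concurrent.CompletableFuture.runAsync(() -> {
    throw new org.springframework.dao.ConcurrencyFailureException("simulated clash");
});
try
{
    java.util.concurrent.CompletableFuture.allOf(failing).join();
}
catch (java.util.concurrent.CompletionException e)
{
    // the original exception is preserved as the cause
    assert e.getCause() instanceof org.springframework.dao.ConcurrencyFailureException;
}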
|
||||
|
||||
private void verifyCreatedGroups(Set<NodeRef> documents, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Set<String> expectedAuthorities = null;
|
||||
Set<Set<String>> errors = new HashSet<>();
|
||||
for (NodeRef doc : documents)
|
||||
{
|
||||
Set<AccessPermission> permissions = permissionService.getAllSetPermissions(doc);
|
||||
Set<String> authorities = getDocumentAuthorities(permissions);
|
||||
Set<String> authoritiesById = getAuthorityIds(authorities);
|
||||
|
||||
verifyIPRGroups(authorities, onlyDuplicatesValidation);
|
||||
|
||||
if (onlyDuplicatesValidation)
|
||||
{
|
||||
// Some documents may not have IPR groups created if there was a ConcurrencyFailureException
|
||||
continue;
|
||||
}
|
||||
|
||||
// All documents should have the same exact set of groups assigned
|
||||
if (expectedAuthorities == null)
|
||||
{
|
||||
expectedAuthorities = authoritiesById;
|
||||
}
|
||||
|
||||
if (!expectedAuthorities.equals(authoritiesById))
|
||||
{
|
||||
errors.add(authoritiesById);
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue("Unexpected authorities linked to document", errors.isEmpty());
|
||||
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<String> getDocumentAuthorities(Set<AccessPermission> permissions)
|
||||
{
|
||||
Set<String> authorities = new HashSet<>();
|
||||
|
||||
for (AccessPermission accessPermission : permissions)
|
||||
{
|
||||
String authority = accessPermission.getAuthority();
|
||||
String authName = authorityService.getName(AuthorityType.GROUP, authority);
|
||||
authorities.add(authName);
|
||||
|
||||
}
|
||||
return authorities;
|
||||
}
|
||||
|
||||
private Set<String> getAuthorityIds(Set<String> authorities)
|
||||
{
|
||||
Set<String> authorityIds = new HashSet<>();
|
||||
for (String authority : authorities)
|
||||
{
|
||||
String authId = authorityService.getAuthorityNodeRef(authority) != null
|
||||
? authorityService.getAuthorityNodeRef(authority).getId()
|
||||
: null;
|
||||
authorityIds.add(authId);
|
||||
}
|
||||
return authorityIds;
|
||||
}
|
||||
|
||||
private void verifyIPRGroups(Set<String> authorities, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
boolean hasGroupIPR = false;
|
||||
|
||||
for (String authorityName : authorities)
|
||||
{
|
||||
String shortName = authorityService.getShortName(authorityName);
|
||||
|
||||
if (authorityName.startsWith("GROUP_IPR"))
|
||||
{
|
||||
hasGroupIPR = true;
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP, null, shortName, false,
|
||||
false, new PagingRequest(0, 10));
|
||||
|
||||
assertEquals("No duplicated IPR group expected", 1, results.getPage().size());
|
||||
}
|
||||
}
|
||||
|
||||
if (!onlyDuplicatesValidation)
|
||||
{
|
||||
assertTrue("No IPR Groups created", hasGroupIPR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -4,7 +4,7 @@

# Version label
version.major=23
version.minor=5
version.minor=6
version.revision=0
version.label=

@@ -0,0 +1,100 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Records Management Module
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* -
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* -
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* -
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* -
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.module.org_alfresco_module_rm.content.cleanser;

import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.File;

import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;

import org.alfresco.module.org_alfresco_module_rm.test.util.BaseUnitTest;
import org.alfresco.service.cmr.repository.ContentIOException;

/**
 * Content cleanser seven pass unit test.
 */
public class ContentCleanserSevenPassUnitTest extends BaseUnitTest
{
    @InjectMocks
    @Spy
    private ContentCleanserSevenPass contentCleanserSevenPass = new ContentCleanserSevenPass()
    {
        /** dummy implementations */
        @Override
        protected void overwrite(File file, OverwriteOperation overwriteOperation)
        {
            // Intentionally left empty
        }
    };

    @Mock
    private File mockedFile;

    /**
     * Given that a file exists, when I cleanse it, then the content is overwritten
     */
    @Test
    public void cleanseFile()
    {
        when(mockedFile.exists()).thenReturn(true);
        when(mockedFile.canWrite()).thenReturn(true);
        contentCleanserSevenPass.cleanse(mockedFile);
        verify(contentCleanserSevenPass, times(2)).overwrite(mockedFile, contentCleanserSevenPass.overwriteOnes);
        verify(contentCleanserSevenPass, times(3)).overwrite(mockedFile, contentCleanserSevenPass.overwriteZeros);
        verify(contentCleanserSevenPass, times(2)).overwrite(mockedFile, contentCleanserSevenPass.overwriteRandom);
    }

    /**
     * Given that the file does not exist, when I cleanse it, then an exception is thrown
     */
    @Test(expected = ContentIOException.class)
    public void fileDoesNotExist()
    {
        when(mockedFile.exists()).thenReturn(false);
        when(mockedFile.canWrite()).thenReturn(true);
        contentCleanserSevenPass.cleanse(mockedFile);
    }

    /**
     * Given that I cannot write to the file, when I cleanse it, then an exception is thrown
     */
    @Test(expected = ContentIOException.class)
    public void cantWriteToFile()
    {
        when(mockedFile.exists()).thenReturn(true);
        when(mockedFile.canWrite()).thenReturn(false);
        contentCleanserSevenPass.cleanse(mockedFile);
    }
}
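/*
 * The verifications above pin down the pass mix only by count (two passes of ones, three
 * of zeros, two of random data = seven passes in total). A sketch of one plausible schedule
 * under that constraint; the real ContentCleanserSevenPass defines the actual order, which
 * is not shown in this diff:
 */
enum Pass { ONES, ZEROS, RANDOM }

Pass[] sevenPassSchedule = {
        Pass.ONES, Pass.ZEROS, Pass.RANDOM,
        Pass.ZEROS, Pass.ONES, Pass.RANDOM,
        Pass.ZEROS
};
// counts: ONES x2, ZEROS x3, RANDOM x2 — matching the Mockito verifications above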
|
@@ -52,6 +52,7 @@ import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
@@ -67,6 +68,7 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.cmr.security.AccessPermission;
|
||||
import org.alfresco.service.cmr.security.AccessStatus;
|
||||
import org.alfresco.service.cmr.security.AuthorityService;
|
||||
@@ -522,6 +524,104 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
verify(mockedPermissionService).setPermission(nodeRef, writeGroup, RMPermissionModel.FILING, true);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a node with no previous IPR groups assigned
|
||||
* And having pre-existing IPR groups matching the ones we need
|
||||
* When I add some read and write authorities but with a different casing
|
||||
* Then the existing IPR groups are used
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test public void addExtendedSecurityWithMixedCasingUsernames()
|
||||
{
|
||||
// Have the usernames in the node as the correct usernames but with incorrect casing
|
||||
String user1 = "UseR";
|
||||
String user2 = "UseR_w";
|
||||
|
||||
// Incorrect IPR Group names
|
||||
Set<String> diffCasingReaders = Stream.of(user1, GROUP).collect(Collectors.toSet());
|
||||
Set<String> diffCasingWriters = Stream.of(user2, GROUP_W).collect(Collectors.toSet());
|
||||
String wrongReadGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(READER_GROUP_PREFIX, diffCasingReaders);
|
||||
String wrongWriteGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(WRITER_GROUP_PREFIX, diffCasingWriters);
|
||||
String wrongReadGroup = wrongReadGroupPrefix + "0";
|
||||
String wrongWriteGroup = wrongWriteGroupPrefix + "0";
|
||||
|
||||
// Correct Group names
|
||||
String correctReadGroup = readGroupPrefix + "0";
|
||||
String correctWriteGroup = writeGroupPrefix + "0";
|
||||
|
||||
// If queried for the correct groups, return the results
|
||||
PagingResults<String> mockedCorrectReadPResults = mock(PagingResults.class);
|
||||
PagingResults<String> mockedCorrectWritePResults = mock(PagingResults.class);
|
||||
when(mockedCorrectReadPResults.getPage())
|
||||
.thenReturn(Stream.of(GROUP_PREFIX + correctReadGroup).collect(Collectors.toList()));
|
||||
when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(readGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedCorrectReadPResults);
|
||||
|
||||
when(mockedCorrectWritePResults.getPage())
|
||||
.thenReturn(Stream.of(GROUP_PREFIX + correctWriteGroup).collect(Collectors.toList()));
|
||||
when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(writeGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedCorrectWritePResults);
|
||||
|
||||
// Don't return results for the incorrect groups (lenient as these may not be called with normalization enabled)
|
||||
PagingResults<String> mockedWrongReadPResults = mock(PagingResults.class);
|
||||
PagingResults<String> mockedWrongWritePResults = mock(PagingResults.class);
|
||||
lenient().when(mockedWrongReadPResults.getPage())
|
||||
.thenReturn(Collections.emptyList());
|
||||
lenient().when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(wrongReadGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedWrongReadPResults);
|
||||
|
||||
lenient().when(mockedWrongWritePResults.getPage())
|
||||
.thenReturn(Collections.emptyList());
|
||||
lenient().when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(wrongWriteGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedWrongWritePResults);
|
||||
|
||||
// The users do exist, despite being in a different casing and are able to be retrieved
|
||||
NodeRef noderefUser1 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER);
|
||||
when(mockedAuthorityService.authorityExists(user1)).thenReturn(true);
|
||||
when(mockedAuthorityService.getAuthorityNodeRef(user1)).thenReturn(noderefUser1);
|
||||
when(mockedNodeService.getProperty(noderefUser1, ContentModel.PROP_USERNAME)).thenReturn(USER);
|
||||
|
||||
NodeRef noderefUser2 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER_W);
|
||||
when(mockedAuthorityService.authorityExists(user2)).thenReturn(true);
|
||||
when(mockedAuthorityService.getAuthorityNodeRef(user2)).thenReturn(noderefUser2);
|
||||
when(mockedNodeService.getProperty(noderefUser2, ContentModel.PROP_USERNAME)).thenReturn(USER_W);
|
||||
|
||||
// Set the extended security service to normalize usernames
|
||||
extendedSecurityService.setEnableUsernameNormalization(true);
|
||||
extendedSecurityService.set(nodeRef, diffCasingReaders, diffCasingWriters);
|
||||
|
||||
// Verify that the incorrect read group is not created
|
||||
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongReadGroup, wrongReadGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
|
||||
// Verify that the incorrect write group is not created
|
||||
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongWriteGroup, wrongWriteGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a node with no previous IPR groups assigned
|
||||
@@ -571,7 +671,7 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
.thenReturn(Stream
|
||||
.of(USER_W, AlfMock.generateText())
|
||||
.collect(Collectors.toSet()));
|
||||
|
||||
|
||||
// add extended security
|
||||
extendedSecurityService.set(nodeRef, READERS, WRITERS);
|
||||
|
||||
@@ -895,7 +995,7 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
// group names
|
||||
String readGroup = extendedSecurityService.getIPRGroupShortName(READER_GROUP_FULL_PREFIX, READERS, 0);
|
||||
String writeGroup = extendedSecurityService.getIPRGroupShortName(WRITER_GROUP_FULL_PREFIX, WRITERS, 0);
|
||||
|
||||
|
||||
// setup renditions
|
||||
NodeRef renditionNodeRef = AlfMock.generateNodeRef(mockedNodeService);
|
||||
when(mockedNodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_RECORD))
|
||||
@@ -904,7 +1004,7 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
.thenReturn(renditionNodeRef);
|
||||
when(mockedNodeService.getChildAssocs(nodeRef, RenditionModel.ASSOC_RENDITION, RegexQNamePattern.MATCH_ALL))
|
||||
.thenReturn(Collections.singletonList(mockedChildAssociationRef));
|
||||
|
||||
|
||||
// setup permissions
|
||||
Set<AccessPermission> permissions = Stream
|
||||
.of(new AccessPermissionImpl(AlfMock.generateText(), AccessStatus.ALLOWED, readGroup, 0),
|
||||
@@ -913,17 +1013,17 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
.collect(Collectors.toSet());
|
||||
when(mockedPermissionService.getAllSetPermissions(nodeRef))
|
||||
.thenReturn(permissions);
|
||||
|
||||
|
||||
// remove extended security
|
||||
extendedSecurityService.remove(nodeRef);
|
||||
|
||||
|
||||
// verify that the groups permissions have been removed
|
||||
verify(mockedPermissionService).clearPermission(nodeRef, readGroup);
|
||||
verify(mockedPermissionService).clearPermission(nodeRef, writeGroup);
|
||||
|
||||
|
||||
// verify that the groups permissions have been removed from the rendition
|
||||
verify(mockedPermissionService).clearPermission(renditionNodeRef, readGroup);
|
||||
verify(mockedPermissionService).clearPermission(renditionNodeRef, writeGroup);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-governance-services-community-repo-parent</artifactId>
        <version>23.5.0.6</version>
        <version>23.6.0.18</version>
    </parent>

    <build>
@@ -7,7 +7,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-community-repo</artifactId>
        <version>23.5.0.6</version>
        <version>23.6.0.18</version>
    </parent>

    <modules>
@@ -8,7 +8,7 @@
    <parent>
        <groupId>org.alfresco</groupId>
        <artifactId>alfresco-community-repo-amps</artifactId>
        <version>23.5.0.6</version>
        <version>23.6.0.18</version>
    </parent>

    <properties>
@@ -51,8 +51,8 @@
            </exclusions>
        </dependency>
        <dependency>
            <groupId>commons-lang</groupId>
            <artifactId>commons-lang</artifactId>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <scope>provided</scope>
        </dependency>

@@ -30,7 +30,7 @@ import java.util.regex.Pattern;

import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.json.simple.JSONObject;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
@@ -92,7 +92,7 @@ public class WikiPageGet extends AbstractWikiWebScript
            {
                links.add(link);
                // build the list of available pages
                WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml(link));
                WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml4(link));
                if (wikiPage != null)
                {
                    pageTitles.add(wikiPage.getTitle());

@@ -91,6 +91,15 @@ function doclist_getAllNodes(parsedArgs, filterParams, query, totalItemCount)
   };
}

function sanitizeJunkFavouriteKeys(favourites){
   for (var key in favourites) {
      if (!key || key.trim() === "") {
         delete favourites[key];
      }
   }
   return favourites;
}

/**
 * Main entry point: Create collection of documents and folders in the given space
 *
@@ -123,6 +132,28 @@ function doclist_main()

   if (logger.isLoggingEnabled())
      logger.log("doclist.lib.js - NodeRef: " + parsedArgs.nodeRef + " Query: " + query);

   favourites = sanitizeJunkFavouriteKeys(favourites);

   if(Object.keys(favourites).length === 0 && query === null)
   {
      return {
         luceneQuery: "",
         paging: {
            totalRecords: 0,
            startIndex: 0
         },
         container: parsedArgs.rootNode,
         parent: null,
         onlineEditing: utils.moduleInstalled("org.alfresco.module.vti"),
         itemCount: {
            folders: 0,
            documents: 0
         },
         items: [],
         customJSON: slingshotDocLib.getJSON()
      };
   }

   var totalItemCount = filterParams.limitResults ? parseInt(filterParams.limitResults, 10) : -1;
   // For all sites documentLibrary query we pull in all available results and post filter
@@ -182,11 +182,14 @@ var Filters =
         case "favourites":
            for (var favourite in favourites)
            {
               if (filterQuery)
               if (favourite && favourite.trim() !== "")
               {
                  filterQuery += " OR ";
                  if (filterQuery)
                  {
                     filterQuery += " OR ";
                  }
                  filterQuery += "ID:\"" + favourite + "\"";
               }
               filterQuery += "ID:\"" + favourite + "\"";
            }

            if (filterQuery.length !== 0)
@@ -201,7 +204,13 @@ var Filters =
            else
            {
               // empty favourites query
               filterQuery = "+ID:\"\"";
               logger.warn("No favourites found for user: " + person.properties.userName);
               return {
                  query: null,
                  limitResults: 0,
                  sort: [],
                  language: "lucene"
               };
            }

            filterParams.query = filterQuery;
@@ -80,6 +80,11 @@ function runAction(p_params)
         {
            result.fileExist = true;
         }
         if (error.indexOf("FolderExistsException") != -1)
         {
            result.fileExist = true;
            result.type = "folder";
         }
      }

      results.push(result);
@@ -45,7 +45,7 @@ import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.alfresco.service.cmr.wiki.WikiService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyMap;
- import org.apache.commons.lang.StringEscapeUtils;
+ import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;

@@ -996,7 +996,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
String link = m.group(1);
link += "?title=<script>alert('xss');</script>";
WikiPageInfo wikiPage2 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, link);
- WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml(link));
+ WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml4(link));
assertEquals(wikiPage2, wikiPage1);
}

@@ -1006,4 +1006,4 @@ public class WikiRestApiTest extends BaseWebScriptTest
this.wikiService.deleteWikiPage(wikiPageNew);
}
}
}
}
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<dependencies>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<properties>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<dependencies>
@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>
</project>
@@ -36,8 +36,7 @@ commons-email http://jakarta.apache.org/commons/
commons-fileupload http://jakarta.apache.org/commons/
commons-httpclient http://jakarta.apache.org/commons/
commons-io http://jakarta.apache.org/commons/
- commons-jxpath http://jakarta.apache.org/commons/
- commons-lang http://jakarta.apache.org/commons/
+ commons-jxpath http://jakarta.apache.org/commons/
+ commons-lang3 http://jakarta.apache.org/commons/
commons-logging http://jakarta.apache.org/commons/
commons-net http://jakarta.apache.org/commons/
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<properties>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<modules>
@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<modules>
@@ -27,7 +27,7 @@

## Synopsis

- **TAS**( **T**est **A**utomation **S**ystem)- **CMIS** is the project that handles the automated tests related only to CMIS API integrated with Alfresco One [Alfresco CMIS API](http://docs.alfresco.com/5.1/pra/1/topics/cmis-welcome.html).
+ **TAS**( **T**est **A**utomation **S**ystem)- **CMIS** is the project that handles the automated tests related only to CMIS API integrated with Alfresco One [Alfresco CMIS API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/Reference/CMIS-API).

It is based on Apache Maven, compatible with major IDEs and is using also Spring capabilities for dependency injection.
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<organization>
@@ -16,7 +16,7 @@ import org.alfresco.utility.testrail.annotation.TestRail;
import org.apache.chemistry.opencmis.commons.exceptions.CmisObjectNotFoundException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisPermissionDeniedException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisUnauthorizedException;
- import org.apache.commons.lang.time.DateUtils;
+ import org.apache.commons.lang3.time.DateUtils;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<developers>
@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<developers>
@@ -27,7 +27,7 @@ Back to [TAS Master Documentation](https://git.alfresco.com/tas/alfresco-tas-uti

## Synopsis

- **TAS**( **T**est **A**utomation **S**ystem)- **RESTAPI** is the project that handles the automated tests related only to [Alfresco REST API](http://docs.alfresco.com/5.1/pra/1/topics/pra-welcome.html).
+ **TAS**( **T**est **A**utomation **S**ystem)- **RESTAPI** is the project that handles the automated tests related only to [Alfresco REST API](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide).

It is based on Apache Maven, compatible with major IDEs and is using also Spring capabilities for dependency injection.

@@ -271,7 +271,7 @@ restClient.onResponse().assertThat().body("entry.modifiedBy.firstName", org.hamc

### How to generate models or check coverage

- There are some simple generators that could parse [Swagger YAML](http://docs.alfresco.com/community/concepts/alfresco-sdk-tutorials-using-rest-api-explorer.html) files and provide some usefull information to you like:
+ There are some simple generators that could parse [Swagger YAML](https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Develop/REST-API-Guide/Things-to-Know-Before-You-Start/The-API-Explorer-is-Your-Source-of-Truth) files and provide some usefull information to you like:

a) Show on screen the actual coverage of TAS vs requests that exists in each YAML file - defined in pom.xml)
@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<properties>
@@ -17,7 +17,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<rest.api.explorer.branch>master</rest.api.explorer.branch>
<httpclient-osgi-version>4.5.6</httpclient-osgi-version>
- <commons-lang3.version>3.17.0</commons-lang3.version>
+ <commons-lang3.version>3.18.0</commons-lang3.version>
<scribejava-apis.version>8.3.3</scribejava-apis.version>
<java.version>17</java.version>
</properties>
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
- * Copyright (C) 2005 - 2022 Alfresco Software Limited
+ * Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -26,6 +26,13 @@
|
||||
package org.alfresco.rest.rules;
|
||||
|
||||
import static java.util.stream.Collectors.toList;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.springframework.http.HttpStatus.BAD_REQUEST;
|
||||
import static org.springframework.http.HttpStatus.CREATED;
|
||||
import static org.springframework.http.HttpStatus.FORBIDDEN;
|
||||
import static org.springframework.http.HttpStatus.NOT_FOUND;
|
||||
|
||||
import static org.alfresco.rest.actions.access.AccessRestrictionUtil.ERROR_MESSAGE_ACCESS_RESTRICTED;
|
||||
import static org.alfresco.rest.actions.access.AccessRestrictionUtil.MAIL_ACTION;
|
||||
import static org.alfresco.rest.rules.RulesTestsUtils.CHECKIN_ACTION;
|
||||
@@ -45,11 +52,6 @@ import static org.alfresco.utility.model.FileModel.getRandomFileModel;
|
||||
import static org.alfresco.utility.model.FileType.TEXT_PLAIN;
|
||||
import static org.alfresco.utility.model.UserModel.getRandomUserModel;
|
||||
import static org.alfresco.utility.report.log.Step.STEP;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.springframework.http.HttpStatus.BAD_REQUEST;
|
||||
import static org.springframework.http.HttpStatus.CREATED;
|
||||
import static org.springframework.http.HttpStatus.FORBIDDEN;
|
||||
import static org.springframework.http.HttpStatus.NOT_FOUND;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Collections;
|
||||
@@ -61,10 +63,13 @@ import java.util.stream.IntStream;
|
||||
import jakarta.json.Json;
|
||||
import jakarta.json.JsonObject;
|
||||
|
||||
import org.apache.chemistry.opencmis.client.api.CmisObject;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import org.alfresco.rest.model.RestActionBodyExecTemplateModel;
|
||||
import org.alfresco.rest.model.RestActionConstraintModel;
|
||||
import org.alfresco.rest.model.RestCompositeConditionDefinitionModel;
|
||||
import org.alfresco.rest.model.RestPaginationModel;
|
||||
import org.alfresco.rest.model.RestRuleModel;
|
||||
import org.alfresco.rest.model.RestRuleModelsCollection;
|
||||
import org.alfresco.utility.constants.UserRole;
|
||||
@@ -74,9 +79,6 @@ import org.alfresco.utility.model.FolderModel;
|
||||
import org.alfresco.utility.model.SiteModel;
|
||||
import org.alfresco.utility.model.TestGroup;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.apache.chemistry.opencmis.client.api.CmisObject;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/**
|
||||
* Tests for POST /nodes/{nodeId}/rule-sets/{ruleSetId}/rules.
|
||||
@@ -101,13 +103,13 @@ public class CreateRulesTests extends RulesRestTest
|
||||
* <p>
|
||||
* Also check that the isShared field is not returned when not requested.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES, TestGroup.SANITY })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES, TestGroup.SANITY})
|
||||
public void createRule()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithModifiedValues();
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
RestRuleModel expectedRuleModel = rulesUtils.createRuleModelWithModifiedValues();
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
@@ -117,7 +119,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check creating a rule in a non-existent folder returns an error. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleInNonExistentFolder()
|
||||
{
|
||||
STEP("Try to create a rule in non-existent folder.");
|
||||
@@ -134,7 +136,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check creating a rule in a non-existent rule set returns an error. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleInNonExistentRuleSet()
|
||||
{
|
||||
STEP("Try to create a rule in non-existent rule set.");
|
||||
@@ -148,7 +150,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Try to create a rule without a name and check the error. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleWithEmptyName()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModel("");
|
||||
@@ -160,7 +162,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check we can create two rules with the same name. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void duplicateRuleNameIsAcceptable()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModel("duplicateRuleName");
|
||||
@@ -175,7 +177,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a user without permission to view the folder cannot create a rule in it. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void requireReadPermissionToCreateRule()
|
||||
{
|
||||
STEP("Create a user and use them to create a private site containing a folder");
|
||||
@@ -194,7 +196,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a Collaborator cannot create a rule in a folder in a private site. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void siteCollaboratorCannotCreateRule()
|
||||
{
|
||||
testRolePermissionsWith(SiteCollaborator);
|
||||
@@ -204,7 +206,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a Contributor cannot create a rule in a private folder. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void siteContributorCannotCreateRule()
|
||||
{
|
||||
testRolePermissionsWith(SiteContributor);
|
||||
@@ -214,7 +216,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a Consumer cannot create a rule in a folder in a private site. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void siteConsumerCannotCreateRule()
|
||||
{
|
||||
testRolePermissionsWith(SiteConsumer);
|
||||
@@ -224,7 +226,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a siteManager can create a rule in a folder in a private site. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void siteManagerCanCreateRule()
|
||||
{
|
||||
testRolePermissionsWith(SiteManager)
|
||||
@@ -234,7 +236,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check we can't create a rule under a document node. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void tryToCreateRuleUnderDocument()
|
||||
{
|
||||
STEP("Create a document.");
|
||||
@@ -250,7 +252,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check we can create several rules. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRules()
|
||||
{
|
||||
STEP("Create a list of rules in one POST request");
|
||||
@@ -258,19 +260,18 @@ public class CreateRulesTests extends RulesRestTest
|
||||
List<RestRuleModel> ruleModels = ruleNames.stream().map(rulesUtils::createRuleModel).collect(toList());
|
||||
|
||||
RestRuleModelsCollection rules = restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createListOfRules(ruleModels);
|
||||
.createListOfRules(ruleModels);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
|
||||
assertEquals("Unexpected number of rules received in response.", ruleNames.size(), rules.getEntries().size());
|
||||
IntStream.range(0, ruleModels.size()).forEach(i ->
|
||||
rules.getEntries().get(i).onModel()
|
||||
.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(ruleNames.get(i)));
|
||||
IntStream.range(0, ruleModels.size()).forEach(i -> rules.getEntries().get(i).onModel()
|
||||
.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(ruleNames.get(i)));
|
||||
}
|
||||
|
||||
/** Check we can create over 100 rules and get them all back in response. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createOver100Rules()
|
||||
{
|
||||
STEP("Create a list of 120 rules in one POST request");
|
||||
@@ -287,10 +288,9 @@ public class CreateRulesTests extends RulesRestTest
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
|
||||
assertEquals("Unexpected number of rules received in response.", ruleCount, rules.getEntries().size());
|
||||
IntStream.range(0, ruleModels.size()).forEach(i ->
|
||||
rules.getEntries().get(i).onModel()
|
||||
.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(ruleNamePrefix + (i + 1)));
|
||||
IntStream.range(0, ruleModels.size()).forEach(i -> rules.getEntries().get(i).onModel()
|
||||
.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(ruleNamePrefix + (i + 1)));
|
||||
|
||||
rules.getPagination()
|
||||
.assertThat().field("count").is(ruleCount)
|
||||
@@ -302,7 +302,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Try to create several rules with an error in one of them. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRulesWithOneError()
|
||||
{
|
||||
STEP("Try to create a three rules but the middle one has an error.");
|
||||
@@ -319,55 +319,55 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check we can create a rule without description. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleWithoutDescription()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
UserModel admin = dataUser.getAdminUser();
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(admin).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
rule.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("description").isNull();
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("description").isNull();
|
||||
}
|
||||
|
||||
/** Check we can create a rule without specifying triggers but with the default "inbound" value. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleWithoutTriggers()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
UserModel admin = dataUser.getAdminUser();
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(admin).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
rule.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("triggers").is(List.of("inbound"));
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("triggers").is(List.of("inbound"));
|
||||
}
|
||||
|
||||
/** Check we can create a rule without error script. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleWithoutErrorScript()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
UserModel admin = dataUser.getAdminUser();
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(admin).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
rule.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("errorScript").isNull();
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("errorScript").isNull();
|
||||
}
|
||||
|
||||
/** Check we can create a rule with irrelevant isShared flag, and it doesn't have impact to the process. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleWithSharedFlag()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
@@ -375,23 +375,23 @@ public class CreateRulesTests extends RulesRestTest
|
||||
UserModel admin = dataUser.getAdminUser();
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(admin).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
rule.assertThat().field("id").isNotNull()
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("isShared").isNull();
|
||||
.assertThat().field("name").is(RULE_NAME_DEFAULT)
|
||||
.assertThat().field("isShared").isNull();
|
||||
}
|
||||
|
||||
/** Check we can create a rule. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES, TestGroup.SANITY })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES, TestGroup.SANITY})
|
||||
public void createRuleAndIncludeFieldsInResponse()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModel("ruleName");
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.include("isShared")
|
||||
.createSingleRule(ruleModel);
|
||||
.include("isShared")
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
rule.assertThat().field("isShared").isNotNull();
|
||||
@@ -412,7 +412,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that the folder's owner can create rules, even if it is in a private site they aren't a member of. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkOwnerCanCreateRule()
|
||||
{
|
||||
STEP("Use admin to create a private site.");
|
||||
@@ -431,7 +431,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that an administrator can create a rule in a private site even if they aren't a member. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkAdminCanCreateRule()
|
||||
{
|
||||
STEP("Use a user to create a private site with a folder.");
|
||||
@@ -446,7 +446,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a coordinator can create rules in folders outside sites. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkCoordinatorCanCreateRule()
|
||||
{
|
||||
STEP("Create a folder in the user's file space.");
|
||||
@@ -454,11 +454,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
|
||||
STEP("Create another user as a coordinator for this folder.");
|
||||
UserModel coordinator = dataUser.createRandomTestUser("Rules");
|
||||
/*
|
||||
Update folder node properties to add a coordinator
|
||||
{ "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "coordinator.getUsername()",
|
||||
"name": "Coordinator", "accessStatus":"ALLOWED" } } }
|
||||
*/
|
||||
/* Update folder node properties to add a coordinator { "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "coordinator.getUsername()", "name": "Coordinator", "accessStatus":"ALLOWED" } } } */
|
||||
String putBody = getAddPermissionsBody(coordinator.getUsername(), "Coordinator");
|
||||
restClient.authenticateUser(user).withCoreAPI().usingNode(folder).updateNode(putBody);
|
||||
|
||||
@@ -470,7 +466,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that an editor cannot create rules in folders outside sites. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkEditorCannotCreateRule()
|
||||
{
|
||||
STEP("Create a folder in the user's file space.");
|
||||
@@ -478,11 +474,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
|
||||
STEP("Create another user as a editor for this folder.");
|
||||
UserModel editor = dataUser.createRandomTestUser();
|
||||
/*
|
||||
Update folder node properties to add an editor
|
||||
{ "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "editor.getUsername()",
|
||||
"name": "Coordinator", "accessStatus":"ALLOWED" } } }
|
||||
*/
|
||||
/* Update folder node properties to add an editor { "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "editor.getUsername()", "name": "Coordinator", "accessStatus":"ALLOWED" } } } */
|
||||
String putBody = getAddPermissionsBody(editor.getUsername(), "Editor");
|
||||
restClient.authenticateUser(user).withCoreAPI().usingNode(folder).updateNode(putBody);
|
||||
|
||||
@@ -494,7 +486,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
}
|
||||
|
||||
/** Check that a collaborator cannot create rules in folders outside sites. */
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkCollaboratorCannotCreateRule()
|
||||
{
|
||||
STEP("Create a folder in the user's file space.");
|
||||
@@ -502,11 +494,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
|
||||
STEP("Create another user as a collaborator for this folder.");
|
||||
UserModel collaborator = dataUser.createRandomTestUser();
|
||||
/*
|
||||
Update folder node properties to add a collaborator
|
||||
{ "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "collaborator.getUsername()",
|
||||
"name": "Coordinator", "accessStatus":"ALLOWED" } } }
|
||||
*/
|
||||
/* Update folder node properties to add a collaborator { "permissions": { "isInheritanceEnabled": true, "locallySet": { "authorityId": "collaborator.getUsername()", "name": "Coordinator", "accessStatus":"ALLOWED" } } } */
|
||||
String putBody = getAddPermissionsBody(collaborator.getUsername(), "Collaborator");
|
||||
restClient.authenticateUser(user).withCoreAPI().usingNode(folder).updateNode(putBody);
|
||||
|
||||
@@ -572,10 +560,10 @@ public class CreateRulesTests extends RulesRestTest
|
||||
public void createRuleWithActions_userCannotUsePrivateAction()
|
||||
{
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(rulesUtils.createRuleWithPrivateAction());
|
||||
.createSingleRule(rulesUtils.createRuleWithPrivateAction());
|
||||
|
||||
restClient.assertStatusCodeIs(FORBIDDEN)
|
||||
.assertLastError().containsSummary(ERROR_MESSAGE_ACCESS_RESTRICTED);
|
||||
.assertLastError().containsSummary(ERROR_MESSAGE_ACCESS_RESTRICTED);
|
||||
}
|
||||
|
||||
/** Check that an administrator can create rules that use private actions. */
|
||||
@@ -583,7 +571,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
public void createRuleWithActions_adminCanUsePrivateAction()
|
||||
{
|
||||
restClient.authenticateUser(dataUser.getAdminUser()).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(rulesUtils.createRuleWithPrivateAction());
|
||||
.createSingleRule(rulesUtils.createRuleWithPrivateAction());
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
}
|
||||
@@ -656,8 +644,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
public void createRuleWithNotApplicableActionShouldFail()
|
||||
{
|
||||
final RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
final RestActionBodyExecTemplateModel invalidAction =
|
||||
rulesUtils.createCustomActionModel(RulesTestsUtils.DELETE_RENDITION_ACTION, Map.of("dummy-key", "dummy-value"));
|
||||
final RestActionBodyExecTemplateModel invalidAction = rulesUtils.createCustomActionModel(RulesTestsUtils.DELETE_RENDITION_ACTION, Map.of("dummy-key", "dummy-value"));
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet().createSingleRule(ruleModel);
|
||||
@@ -673,8 +660,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
public void createRuleWithMissingActionParametersShouldFail()
|
||||
{
|
||||
final RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
final RestActionBodyExecTemplateModel invalidAction =
|
||||
rulesUtils.createCustomActionModel(RulesTestsUtils.COPY_ACTION, Collections.emptyMap());
|
||||
final RestActionBodyExecTemplateModel invalidAction = rulesUtils.createCustomActionModel(RulesTestsUtils.COPY_ACTION, Collections.emptyMap());
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
@@ -736,7 +722,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
public void createRuleWithoutMandatoryActionParametersShouldFail()
|
||||
{
|
||||
final RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
final RestActionBodyExecTemplateModel invalidAction = rulesUtils.createCustomActionModel(COPY_ACTION, Map.of("deep-copy",false));
|
||||
final RestActionBodyExecTemplateModel invalidAction = rulesUtils.createCustomActionModel(COPY_ACTION, Map.of("deep-copy", false));
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
@@ -749,7 +735,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
/**
|
||||
* Check we get error when attempting to create a rule that copies files to a non-existent folder.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleThatUsesNonExistentNode()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
@@ -758,16 +744,16 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(NOT_FOUND);
|
||||
restClient.assertLastError().containsSummary("The entity with id: non-existent-node was not found");
|
||||
restClient.assertLastError().containsSummary("Destination folder having Id: non-existent-node no longer exists. Please update your rule definition.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Check we get error when attempting to create a rule that references a folder that the user does not have read permission for.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleThatUsesNodeWithoutReadPermission()
|
||||
{
|
||||
SiteModel privateSite = dataSite.usingAdmin().createPrivateRandomSite();
|
||||
@@ -779,7 +765,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(NOT_FOUND);
|
||||
restClient.assertLastError().containsSummary("The entity with id: " + privateFolder.getNodeRef() + " was not found");
|
||||
@@ -788,7 +774,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
/**
|
||||
* Check we get error when attempting to create a rule that copies files to a folder that a user only has read permission for.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void createRuleThatWritesToNodeWithoutPermission()
|
||||
{
|
||||
SiteModel privateSite = dataSite.usingAdmin().createPrivateRandomSite();
|
||||
@@ -802,7 +788,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(invalidAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(FORBIDDEN);
|
||||
restClient.assertLastError().containsSummary("No proper permissions for node: " + privateFolder.getNodeRef());
|
||||
@@ -828,7 +814,6 @@ public class CreateRulesTests extends RulesRestTest
|
||||
restClient.assertLastError().containsSummary("Node is not a folder " + fileModel.getNodeRef());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Check we get error when attempting to create a rule with mail action defined with non-existing mail template.
|
||||
*/
|
||||
@@ -850,7 +835,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(mailAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(BAD_REQUEST);
|
||||
restClient.assertLastError().containsSummary("Action parameter: template has invalid value (" + mailTemplate +
|
||||
@@ -860,7 +845,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
/**
|
||||
* Check the user can create a rule with a script.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkCanUseScriptInRule()
|
||||
{
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
@@ -869,7 +854,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(scriptAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(CREATED);
|
||||
}
|
||||
@@ -877,7 +862,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
/**
|
||||
* Check the script has to be stored in the scripts directory in the data dictionary.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkCantUseNodeOutsideScriptsDirectory()
|
||||
{
|
||||
STEP("Copy script to location outside data dictionary.");
|
||||
@@ -898,16 +883,16 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(scriptAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(BAD_REQUEST)
|
||||
.assertLastError().containsSummary("script-ref has invalid value");
|
||||
.assertLastError().containsSummary("script-ref has invalid value");
|
||||
}
|
||||
|
||||
/**
|
||||
* Check a real category needs to be supplied when linking to a category.
|
||||
*/
|
||||
@Test (groups = { TestGroup.REST_API, TestGroup.RULES })
|
||||
@Test(groups = {TestGroup.REST_API, TestGroup.RULES})
|
||||
public void checkLinkToCategoryNeedsRealCategory()
|
||||
{
|
||||
STEP("Attempt to link to a category with a folder node, rather than a category node.");
|
||||
@@ -918,7 +903,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setActions(List.of(categoryAction));
|
||||
|
||||
restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
restClient.assertStatusCodeIs(BAD_REQUEST);
|
||||
}
|
||||
@@ -933,7 +918,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setConditions(rulesUtils.createVariousConditions());
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
RestRuleModel expectedRuleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
expectedRuleModel.setConditions(rulesUtils.createVariousConditions());
|
||||
@@ -952,7 +937,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
ruleModel.setConditions(rulesUtils.createCompositeCondition(null));
|
||||
|
||||
RestRuleModel rule = restClient.authenticateUser(user).withPrivateAPI().usingNode(ruleFolder).usingDefaultRuleSet()
|
||||
.createSingleRule(ruleModel);
|
||||
.createSingleRule(ruleModel);
|
||||
|
||||
RestRuleModel expectedRuleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
expectedRuleModel.setTriggers(List.of("inbound"));
|
||||
@@ -969,10 +954,8 @@ public class CreateRulesTests extends RulesRestTest
|
||||
STEP("Try to create a rule with non existing category in conditions.");
|
||||
String fakeCategoryId = "bdba5f9f-fake-id22-803b-349bcfd06fd1";
|
||||
RestCompositeConditionDefinitionModel conditions = rulesUtils.createCompositeCondition(List.of(
|
||||
rulesUtils.createCompositeCondition(!INVERTED, List.of(
|
||||
rulesUtils.createSimpleCondition("category", "equals", fakeCategoryId)
|
||||
))
|
||||
));
|
||||
rulesUtils.createCompositeCondition(!INVERTED, List.of(
|
||||
rulesUtils.createSimpleCondition("category", "equals", fakeCategoryId)))));
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
ruleModel.setConditions(conditions);
|
||||
|
||||
@@ -992,9 +975,7 @@ public class CreateRulesTests extends RulesRestTest
|
||||
final String comparator = "greaterthan";
|
||||
RestCompositeConditionDefinitionModel conditions = rulesUtils.createCompositeCondition(List.of(
|
||||
rulesUtils.createCompositeCondition(!INVERTED, List.of(
|
||||
rulesUtils.createSimpleCondition("size", comparator, "500")
|
||||
))
|
||||
));
|
||||
rulesUtils.createSimpleCondition("size", comparator, "500")))));
|
||||
RestRuleModel ruleModel = rulesUtils.createRuleModelWithDefaultValues();
|
||||
ruleModel.setConditions(conditions);
|
||||
|
||||
|
@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<developers>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<properties>
@@ -4,7 +4,7 @@
|
||||
<!-- Resource defaultTransactionIsolation="-1" defaultAutoCommit="false" maxActive="100" initialSize="10" password="alfresco" username="alfresco" url="jdbc:mysql:///alfresco" driverClassName="org.gjt.mm.mysql.Driver" type="javax.sql.DataSource" auth="Container" name="jdbc/dataSource"/-->
|
||||
<Environment override="false" type="java.lang.Boolean" name="properties/startup.enable" description="A flag that globally enables or disables startup of the major Alfresco subsystems." value="true"/>
|
||||
<Environment override="false" type="java.lang.String" name="properties/dir.root" description="The filesystem directory below which content and index data is stored. Should be on a shared disk if this is a clustered installation."/>
|
||||
<Environment override="false" type="java.lang.String" name="properties/hibernate.dialect" description="The fully qualified name of a org.hibernate.dialect.Dialect subclass that allows Hibernate to generate SQL optimized for a particular relational database. Choose from org.hibernate.dialect.DerbyDialect, org.hibernate.dialect.MySQLInnoDBDialect, org.alfresco.repo.domain.hibernate.dialect.AlfrescoOracle9Dialect, org.alfresco.repo.domain.hibernate.dialect.AlfrescoSybaseAnywhereDialect, org.alfresco.repo.domain.hibernate.dialect.AlfrescoSQLServerDialect, org.hibernate.dialect.PostgreSQLDialect"/>
|
||||
<Environment override="false" type="java.lang.String" name="properties/hibernate.dialect" description="The fully qualified name of a org.hibernate.dialect.Dialect subclass that allows Hibernate to generate SQL optimized for a particular relational database. Choose from org.hibernate.dialect.DerbyDialect, org.hibernate.dialect.MySQLInnoDBDialect, org.alfresco.repo.domain.hibernate.dialect.AlfrescoOracle9Dialect, org.alfresco.repo.domain.hibernate.dialect.AlfrescoSybaseAnywhereDialect, org.alfresco.repo.domain.hibernate.dialect.SQLServerDialect, org.hibernate.dialect.PostgreSQLDialect"/>
|
||||
<Environment override="false" type="java.lang.String" name="properties/hibernate.query.substitutions" description="Mapping from tokens in Hibernate queries to SQL tokens. For PostgreSQL, set this to "true TRUE, false FALSE"."/>
|
||||
<Environment override="false" type="java.lang.Boolean" name="properties/hibernate.jdbc.use_get_generated_keys" description="Enable use of JDBC3 PreparedStatement.getGeneratedKeys() to retrieve natively generated keys after insert. Requires JDBC3+ driver. Set to false if your driver has problems with the Hibernate identifier generators. By default, tries to determine the driver capabilities using connection metadata."/>
|
||||
<Environment override="false" type="java.lang.String" name="properties/hibernate.default_schema" description="Qualify unqualified table names with the given schema/tablespace in generated SQL. It may be necessary to set this when the target database has more than one schema."/>
|
||||
|
@@ -500,7 +500,7 @@
org.hibernate.dialect.MySQLInnoDBDialect,
org.alfresco.repo.domain.hibernate.dialect.AlfrescoOracle9Dialect,
org.alfresco.repo.domain.hibernate.dialect.AlfrescoSybaseAnywhereDialect,
- org.alfresco.repo.domain.hibernate.dialect.AlfrescoSQLServerDialect, org.hibernate.dialect.PostgreSQLDialect</description>
+ org.alfresco.repo.domain.hibernate.dialect.SQLServerDialect, org.hibernate.dialect.PostgreSQLDialect</description>
<env-entry-name>properties/hibernate.dialect</env-entry-name>
<env-entry-type>java.lang.String</env-entry-type>
<env-entry-value/> <!-- Empty value included for JBoss compatibility -->
@@ -74,7 +74,7 @@ ModuleDetails shareServicesModule = moduleService.getModule("alfresco-share-serv
<div class="index-list">
<h4><%=descriptorService.getServerDescriptor().getEdition()%></h4>
<p></p>
- <p><a href="http://docs.alfresco.com/">Online Documentation</a></p>
+ <p><a href="https://support.hyland.com/p/alfresco">Online Documentation</a></p>
<p></p>
<%
if (shareServicesModule != null && ModuleInstallState.INSTALLED.equals(shareServicesModule.getInstallState()))
pom.xml
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>

@@ -24,7 +24,7 @@

<properties>
<acs.version.major>23</acs.version.major>
- <acs.version.minor>5</acs.version.minor>
+ <acs.version.minor>6</acs.version.minor>
<acs.version.revision>0</acs.version.revision>
<acs.version.label />
<amp.min.version>${acs.version.major}.0.0</amp.min.version>

@@ -57,13 +57,13 @@
<dependency.acs-event-model.version>0.0.33</dependency.acs-event-model.version>

<dependency.aspectj.version>1.9.22.1</dependency.aspectj.version>
- <dependency.spring.version>6.1.14</dependency.spring.version>
- <dependency.spring-security.version>6.3.4</dependency.spring-security.version>
+ <dependency.spring.version>6.2.8</dependency.spring.version>
+ <dependency.spring-security.version>6.3.9</dependency.spring-security.version>
<dependency.antlr.version>3.5.3</dependency.antlr.version>
<dependency.jackson.version>2.17.2</dependency.jackson.version>
- <dependency.cxf.version>4.0.5</dependency.cxf.version>
+ <dependency.cxf.version>4.1.2</dependency.cxf.version>
<dependency.opencmis.version>1.0.0-jakarta-1</dependency.opencmis.version>
- <dependency.webscripts.version>9.4</dependency.webscripts.version>
+ <dependency.webscripts.version>10.2</dependency.webscripts.version>
<dependency.bouncycastle.version>1.78.1</dependency.bouncycastle.version>
<dependency.mockito-core.version>5.14.1</dependency.mockito-core.version>
<dependency.assertj.version>3.26.3</dependency.assertj.version>

@@ -83,10 +83,10 @@
<dependency.groovy.version>3.0.22</dependency.groovy.version>
<dependency.tika.version>2.9.2</dependency.tika.version>
<dependency.truezip.version>7.7.10</dependency.truezip.version>
- <dependency.poi.version>5.3.0</dependency.poi.version>
+ <dependency.poi.version>5.4.1</dependency.poi.version>
<dependency.jboss.logging.version>3.5.0.Final</dependency.jboss.logging.version>
- <dependency.camel.version>4.6.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
- <dependency.netty.version>4.1.113.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
+ <dependency.camel.version>4.10.2</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies -->
+ <dependency.netty.version>4.1.118.Final</dependency.netty.version> <!-- must be in sync with camels transitive dependencies, e.g.: netty-common -->
<dependency.activemq.version>5.18.3</dependency.activemq.version>
<dependency.apache-compress.version>1.27.1</dependency.apache-compress.version>
<dependency.awaitility.version>4.2.2</dependency.awaitility.version>

@@ -111,9 +111,9 @@
<dependency.jakarta-ee-json-api.version>2.1.3</dependency.jakarta-ee-json-api.version>
<dependency.jakarta-ee-json-impl.version>1.1.7</dependency.jakarta-ee-json-impl.version>
<dependency.jakarta-json-path.version>2.9.0</dependency.jakarta-json-path.version>
- <dependency.json-smart.version>2.5.1</dependency.json-smart.version>
+ <dependency.json-smart.version>2.5.2</dependency.json-smart.version>
<alfresco.googledrive.version>4.1.0</alfresco.googledrive.version>
- <alfresco.aos-module.version>3.2.0</alfresco.aos-module.version>
+ <alfresco.aos-module.version>3.3.0</alfresco.aos-module.version>
<alfresco.api-explorer.version>23.4.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->

<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>

@@ -154,7 +154,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
- <tag>23.5.0.6</tag>
+ <tag>23.6.0.18</tag>
</scm>

<distributionManagement>

@@ -409,7 +409,7 @@
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
- <version>1.9.4</version>
+ <version>1.11.0</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>

@@ -417,9 +417,9 @@
<version>1.17.1</version>
</dependency>
<dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.6</version>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.18.0</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>

@@ -439,8 +439,8 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
- <artifactId>commons-fileupload2-jakarta</artifactId>
- <version>2.0.0-M1</version>
+ <artifactId>commons-fileupload2-jakarta-servlet6</artifactId>
+ <version>2.0.0-M4</version>
</dependency>
<dependency>
<groupId>commons-net</groupId>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
- <version>23.5.0.6</version>
+ <version>23.6.0.18</version>
</parent>

<dependencies>
@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
- * Copyright (C) 2005 - 2023 Alfresco Software Limited
+ * Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of

@@ -34,7 +34,7 @@ import org.alfresco.repo.management.subsystems.ActivateableBean;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationException;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
- import org.alfresco.repo.security.authentication.external.AdminConsoleAuthenticator;
+ import org.alfresco.repo.security.authentication.external.ExternalUserAuthenticator;
import org.alfresco.repo.security.authentication.external.RemoteUserMapper;
import org.alfresco.repo.web.auth.AuthenticationListener;
import org.alfresco.repo.web.auth.TicketCredentials;

@@ -55,7 +55,7 @@ import java.util.List;
import java.util.Set;

/**
- * Authenticator to provide Remote User based Header authentication dropping back to Basic Auth otherwise.
+ * Authenticator to provide Remote User based Header authentication dropping back to Basic Auth otherwise.
* Statelessly authenticating via a secure header now does not require a Session so can be used with
* request-level load balancers which was not previously possible.
* <p>
@@ -73,9 +73,11 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
|
||||
|
||||
protected RemoteUserMapper remoteUserMapper;
|
||||
protected AuthenticationComponent authenticationComponent;
protected AdminConsoleAuthenticator adminConsoleAuthenticator;
protected ExternalUserAuthenticator adminConsoleAuthenticator;
protected ExternalUserAuthenticator webScriptsHomeAuthenticator;

private boolean alwaysAllowBasicAuthForAdminConsole = true;
private boolean alwaysAllowBasicAuthForWebScriptsHome = true;
List<String> adminConsoleScriptFamilies;
long getRemoteUserTimeoutMilliseconds = GET_REMOTE_USER_TIMEOUT_MILLISECONDS_DEFAULT;

@@ -84,7 +86,7 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
{
this.remoteUserMapper = remoteUserMapper;
}

public void setAuthenticationComponent(AuthenticationComponent authenticationComponent)
{
this.authenticationComponent = authenticationComponent;
@@ -100,6 +102,16 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
this.alwaysAllowBasicAuthForAdminConsole = alwaysAllowBasicAuthForAdminConsole;
}

public boolean isAlwaysAllowBasicAuthForWebScriptsHome()
{
return alwaysAllowBasicAuthForWebScriptsHome;
}

public void setAlwaysAllowBasicAuthForWebScriptsHome(boolean alwaysAllowBasicAuthForWebScriptsHome)
{
this.alwaysAllowBasicAuthForWebScriptsHome = alwaysAllowBasicAuthForWebScriptsHome;
}

public List<String> getAdminConsoleScriptFamilies()
{
return adminConsoleScriptFamilies;
@@ -121,11 +133,17 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
}

public void setAdminConsoleAuthenticator(
AdminConsoleAuthenticator adminConsoleAuthenticator)
ExternalUserAuthenticator adminConsoleAuthenticator)
{
this.adminConsoleAuthenticator = adminConsoleAuthenticator;
}

public void setWebScriptsHomeAuthenticator(
ExternalUserAuthenticator webScriptsHomeAuthenticator)
{
this.webScriptsHomeAuthenticator = webScriptsHomeAuthenticator;
}

@Override
public Authenticator create(WebScriptServletRequest req, WebScriptServletResponse res)
{
@@ -139,11 +157,13 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
*/
public class RemoteUserAuthenticator extends BasicHttpAuthenticator
{
private static final String WEB_SCRIPTS_BASE_PATH = "org/springframework/extensions/webscripts";

public RemoteUserAuthenticator(WebScriptServletRequest req, WebScriptServletResponse res, AuthenticationListener listener)
{
super(req, res, listener);
}

@Override
public boolean authenticate(RequiredAuthentication required, boolean isGuest)
{
@@ -159,24 +179,47 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
{

if (servletReq.getServiceMatch() != null &&
isAdminConsoleWebScript(servletReq.getServiceMatch().getWebScript()) && isAdminConsoleAuthenticatorActive())
isAdminConsole(servletReq.getServiceMatch().getWebScript()) && isAdminConsoleAuthenticatorActive())
{
userId = getAdminConsoleUser();
}
else if (servletReq.getServiceMatch() != null &&
isWebScriptsHome(servletReq.getServiceMatch().getWebScript()) && isWebScriptsHomeAuthenticatorActive())
{
userId = getWebScriptsHomeUser();
}

if (userId == null)
{
if (isAlwaysAllowBasicAuthForAdminConsole())
{
final boolean useTimeoutForAdminAccessingAdminConsole = shouldUseTimeoutForAdminAccessingAdminConsole(required, isGuest);
boolean shouldUseTimeout = shouldUseTimeoutForAdminAccessingAdminConsole(required, isGuest);

if (useTimeoutForAdminAccessingAdminConsole && isBasicAuthHeaderPresentForAdmin())
if (shouldUseTimeout && isBasicAuthHeaderPresentForAdmin())
{
return callBasicAuthForAdminConsoleAccess(required, isGuest);
return callBasicAuthForAdminConsoleOrWebScriptsHomeAccess(required, isGuest);
}
try
{
userId = getRemoteUserWithTimeout(useTimeoutForAdminAccessingAdminConsole);
userId = getRemoteUserWithTimeout(shouldUseTimeout);
}
catch (AuthenticationTimeoutException e)
{
// return basic auth challenge
return false;
}
}
else if (isAlwaysAllowBasicAuthForWebScriptsHome())
{
boolean shouldUseTimeout = shouldUseTimeoutForAdminAccessingWebScriptsHome(required, isGuest);

if (shouldUseTimeout && isBasicAuthHeaderPresentForAdmin())
{
return callBasicAuthForAdminConsoleOrWebScriptsHomeAccess(required, isGuest);
}
try
{
userId = getRemoteUserWithTimeout(shouldUseTimeout);
}
catch (AuthenticationTimeoutException e)
{
@@ -255,38 +298,63 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
authenticated = super.authenticate(required, isGuest);
}
}
if(!authenticated && servletReq.getServiceMatch() != null &&
isAdminConsoleWebScript(servletReq.getServiceMatch().getWebScript()) && isAdminConsoleAuthenticatorActive())
if (!authenticated && servletReq.getServiceMatch() != null)
{
adminConsoleAuthenticator.requestAuthentication(this.servletReq.getHttpServletRequest(), this.servletRes.getHttpServletResponse());
WebScript webScript = servletReq.getServiceMatch().getWebScript();

if (isAdminConsole(webScript) && isAdminConsoleAuthenticatorActive())
{
adminConsoleAuthenticator.requestAuthentication(
this.servletReq.getHttpServletRequest(),
this.servletRes.getHttpServletResponse());
}
else if (isWebScriptsHome(webScript)
&& isWebScriptsHomeAuthenticatorActive())
{
webScriptsHomeAuthenticator.requestAuthentication(
this.servletReq.getHttpServletRequest(),
this.servletRes.getHttpServletResponse());
}
}
return authenticated;
}

private boolean callBasicAuthForAdminConsoleAccess(RequiredAuthentication required, boolean isGuest)
private boolean callBasicAuthForAdminConsoleOrWebScriptsHomeAccess(RequiredAuthentication required, boolean isGuest)
{
// return REST call, after a timeout/basic auth challenge
if (LOGGER.isTraceEnabled())
{
LOGGER.trace("An Admin Console request has come in with Basic Auth headers present for an admin user.");
LOGGER.trace("An Admin Console or WebScripts Home request has come in with Basic Auth headers present for an admin user.");
}
// In order to prompt for another password, in case it was not entered correctly,
// the output of this method should be returned by the calling "authenticate" method;
// This would also mean, that once the admin basic auth header is present,
// the authentication chain will not be used for the admin console access
// the authentication chain will not be used for access
return super.authenticate(required, isGuest);
}

private boolean shouldUseTimeoutForAdminAccessingAdminConsole(RequiredAuthentication required, boolean isGuest)
{
boolean useTimeoutForAdminAccessingAdminConsole = RequiredAuthentication.admin.equals(required) && !isGuest &&
servletReq.getServiceMatch() != null && isAdminConsoleWebScript(servletReq.getServiceMatch().getWebScript());
boolean adminConsoleTimeout = RequiredAuthentication.admin.equals(required) && !isGuest &&
servletReq.getServiceMatch() != null && isAdminConsole(servletReq.getServiceMatch().getWebScript());

if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Should ensure that the admins can login with basic auth: " + useTimeoutForAdminAccessingAdminConsole);
LOGGER.trace("Should ensure that the admins can login with basic auth: " + adminConsoleTimeout);
}
return useTimeoutForAdminAccessingAdminConsole;
return adminConsoleTimeout;
}

private boolean shouldUseTimeoutForAdminAccessingWebScriptsHome(RequiredAuthentication required, boolean isGuest)
{
boolean adminWebScriptsHomeTimeout = RequiredAuthentication.admin.equals(required) && !isGuest &&
servletReq.getServiceMatch() != null && isWebScriptsHome(servletReq.getServiceMatch().getWebScript());

if (LOGGER.isTraceEnabled())
{
LOGGER.trace("Should ensure that the admins can login with basic auth: " + adminWebScriptsHomeTimeout);
}
return adminWebScriptsHomeTimeout;
}

private boolean isRemoteUserMapperActive()
@@ -299,7 +367,12 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
return adminConsoleAuthenticator != null && (!(adminConsoleAuthenticator instanceof ActivateableBean) || ((ActivateableBean) adminConsoleAuthenticator).isActive());
}

protected boolean isAdminConsoleWebScript(WebScript webScript)
private boolean isWebScriptsHomeAuthenticatorActive()
{
return webScriptsHomeAuthenticator != null && (!(webScriptsHomeAuthenticator instanceof ActivateableBean) || ((ActivateableBean) webScriptsHomeAuthenticator).isActive());
}

protected boolean isAdminConsole(WebScript webScript)
{
if (webScript == null || adminConsoleScriptFamilies == null || webScript.getDescription() == null
|| webScript.getDescription().getFamilys() == null)
@@ -313,7 +386,7 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
}

// intersect the "family" sets defined
Set<String> families = new HashSet<String>(webScript.getDescription().getFamilys());
Set<String> families = new HashSet<>(webScript.getDescription().getFamilys());
families.retainAll(adminConsoleScriptFamilies);
final boolean isAdminConsole = !families.isEmpty();

@@ -325,6 +398,23 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
return isAdminConsole;
}

protected boolean isWebScriptsHome(WebScript webScript)
{
if (webScript == null || webScript.toString() == null)
{
return false;
}

boolean isWebScriptsHome = webScript.toString().startsWith(WEB_SCRIPTS_BASE_PATH);

if (LOGGER.isTraceEnabled() && isWebScriptsHome)
{
LOGGER.trace("Detected a WebScripts Home webscript: " + webScript);
}

return isWebScriptsHome;
}

protected String getRemoteUserWithTimeout(boolean useTimeout) throws AuthenticationTimeoutException
{
if (!useTimeout)
@@ -394,7 +484,7 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor
protected String getRemoteUser()
{
String userId = null;

// If the remote user mapper is configured, we may be able to map in an externally authenticated user
if (isRemoteUserMapperActive())
{
@@ -423,7 +513,21 @@ public class RemoteUserAuthenticatorFactory extends BasicHttpAuthenticatorFactor

if (isRemoteUserMapperActive())
{
userId = adminConsoleAuthenticator.getAdminConsoleUser(this.servletReq.getHttpServletRequest(), this.servletRes.getHttpServletResponse());
userId = adminConsoleAuthenticator.getUserId(this.servletReq.getHttpServletRequest(), this.servletRes.getHttpServletResponse());
}

logRemoteUserID(userId);

return userId;
}

protected String getWebScriptsHomeUser()
{
String userId = null;

if (isRemoteUserMapperActive())
{
userId = webScriptsHomeAuthenticator.getUserId(this.servletReq.getHttpServletRequest(), this.servletRes.getHttpServletResponse());
}

logRemoteUserID(userId);
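Aside, not part of the diff above: the new isWebScriptsHome check matches the script's path prefix rather than a script "family". A minimal standalone Java sketch of that check, assuming, as the hunk suggests, that webScript.toString() yields the script's store path; the class name and example script paths below are illustrative only.

public class WebScriptsHomeCheckSketch
{
    // Same prefix constant as introduced in the diff above.
    private static final String WEB_SCRIPTS_BASE_PATH = "org/springframework/extensions/webscripts";

    // Stand-in for isWebScriptsHome(WebScript), operating on the script's string form.
    static boolean isWebScriptsHome(String webScriptPath)
    {
        return webScriptPath != null && webScriptPath.startsWith(WEB_SCRIPTS_BASE_PATH);
    }

    public static void main(String[] args)
    {
        // Illustrative script paths, not taken from the repository.
        System.out.println(isWebScriptsHome("org/springframework/extensions/webscripts/index.get"));   // true
        System.out.println(isWebScriptsHome("org/alfresco/repository/admin/admin-systemsummary.get")); // false
    }
}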
@@ -28,11 +28,9 @@ package org.alfresco.repo.web.scripts.transfer;

import jakarta.servlet.http.HttpServletRequest;

import org.alfresco.service.cmr.transfer.TransferException;
import org.alfresco.service.cmr.transfer.TransferReceiver;
import org.apache.commons.fileupload2.core.FileItemInput;
import org.apache.commons.fileupload2.core.FileItemInputIterator;
import org.apache.commons.fileupload2.jakarta.JakartaServletFileUpload;
import org.apache.commons.fileupload2.jakarta.servlet6.JakartaServletFileUpload;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.Status;
@@ -41,6 +39,9 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.extensions.webscripts.WrappingWebScriptRequest;
import org.springframework.extensions.webscripts.servlet.WebScriptServletRequest;

import org.alfresco.service.cmr.transfer.TransferException;
import org.alfresco.service.cmr.transfer.TransferReceiver;

/**
* This command processor is used to receive one or more content files for a given transfer.
*
@@ -50,9 +51,9 @@ import org.springframework.extensions.webscripts.servlet.WebScriptServletRequest
public class PostContentCommandProcessor implements CommandProcessor
{
private TransferReceiver receiver;

private static final String MSG_CAUGHT_UNEXPECTED_EXCEPTION = "transfer_service.receiver.caught_unexpected_exception";

private static Log logger = LogFactory.getLog(PostContentCommandProcessor.class);

/**
@@ -64,12 +65,9 @@ public class PostContentCommandProcessor implements CommandProcessor
this.receiver = receiver;
}

/*
* (non-Javadoc)
/* (non-Javadoc)
*
* @see org.alfresco.repo.web.scripts.transfer.CommandProcessor#process(org.alfresco.web.scripts.WebScriptRequest,
* org.alfresco.web.scripts.WebScriptResponse)
*/
* @see org.alfresco.repo.web.scripts.transfer.CommandProcessor#process(org.alfresco.web.scripts.WebScriptRequest, org.alfresco.web.scripts.WebScriptResponse) */
public int process(WebScriptRequest req, WebScriptResponse resp)
{
logger.debug("post content start");
@@ -91,8 +89,7 @@ public class PostContentCommandProcessor implements CommandProcessor
{
current = null;
}
}
while (current != null);
} while (current != null);
if (webScriptServletRequest == null)
{
resp.setStatus(Status.STATUS_BAD_REQUEST);
@@ -101,7 +98,7 @@ public class PostContentCommandProcessor implements CommandProcessor

HttpServletRequest servletRequest = webScriptServletRequest.getHttpServletRequest();

//Read the transfer id from the request
// Read the transfer id from the request
String transferId = servletRequest.getParameter("transferId");

if ((transferId == null) || !JakartaServletFileUpload.isMultipartContent(servletRequest))
@@ -124,34 +121,34 @@ public class PostContentCommandProcessor implements CommandProcessor
logger.debug("got content Mime Part : " + name);
receiver.saveContent(transferId, item.getName(), item.getInputStream());
}
}

// WebScriptServletRequest alfRequest = (WebScriptServletRequest)req;
// String[] names = alfRequest.getParameterNames();
// for(String name : names)
// {
// FormField item = alfRequest.getFileField(name);
//
// if(item != null)
// {
// logger.debug("got content Mime Part : " + name);
// receiver.saveContent(transferId, item.getName(), item.getInputStream());
// }
// else
// {
// //TODO - should this be an exception?
// logger.debug("Unable to get content for Mime Part : " + name);
// }
// }

}

// WebScriptServletRequest alfRequest = (WebScriptServletRequest)req;
// String[] names = alfRequest.getParameterNames();
// for(String name : names)
// {
// FormField item = alfRequest.getFileField(name);
//
// if(item != null)
// {
// logger.debug("got content Mime Part : " + name);
// receiver.saveContent(transferId, item.getName(), item.getInputStream());
// }
// else
// {
// //TODO - should this be an exception?
// logger.debug("Unable to get content for Mime Part : " + name);
// }
// }

logger.debug("success");

resp.setStatus(Status.STATUS_OK);
}
}
catch (Exception ex)
{
logger.debug("exception caught", ex);
if(transferId != null)
if (transferId != null)
{
logger.debug("ending transfer", ex);
receiver.end(transferId);
@@ -27,15 +27,11 @@
package org.alfresco.repo.web.scripts.transfer;

import java.io.OutputStream;

import jakarta.servlet.http.HttpServletRequest;

import org.alfresco.repo.transfer.TransferCommons;
import org.alfresco.service.cmr.transfer.TransferException;
import org.alfresco.service.cmr.transfer.TransferReceiver;
import org.apache.commons.fileupload2.core.FileItemInput;
import org.apache.commons.fileupload2.core.FileItemInputIterator;
import org.apache.commons.fileupload2.jakarta.JakartaServletFileUpload;
import org.apache.commons.fileupload2.jakarta.servlet6.JakartaServletFileUpload;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.Status;
@@ -44,6 +40,10 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
import org.springframework.extensions.webscripts.WrappingWebScriptRequest;
import org.springframework.extensions.webscripts.servlet.WebScriptServletRequest;

import org.alfresco.repo.transfer.TransferCommons;
import org.alfresco.service.cmr.transfer.TransferException;
import org.alfresco.service.cmr.transfer.TransferReceiver;

/**
* This command processor is used to receive the snapshot for a given transfer.
*
@@ -53,17 +53,17 @@ import org.springframework.extensions.webscripts.servlet.WebScriptServletRequest
public class PostSnapshotCommandProcessor implements CommandProcessor
{
private TransferReceiver receiver;

private static Log logger = LogFactory.getLog(PostSnapshotCommandProcessor.class);

private static final String MSG_CAUGHT_UNEXPECTED_EXCEPTION = "transfer_service.receiver.caught_unexpected_exception";

/* (non-Javadoc)
* @see org.alfresco.repo.web.scripts.transfer.CommandProcessor#process(org.alfresco.web.scripts.WebScriptRequest, org.alfresco.web.scripts.WebScriptResponse)
*/
*
* @see org.alfresco.repo.web.scripts.transfer.CommandProcessor#process(org.alfresco.web.scripts.WebScriptRequest, org.alfresco.web.scripts.WebScriptResponse) */
public int process(WebScriptRequest req, WebScriptResponse resp)
{

int result = Status.STATUS_OK;
// Unwrap to a WebScriptServletRequest if we have one
WebScriptServletRequest webScriptServletRequest = null;
@@ -83,45 +83,44 @@ public class PostSnapshotCommandProcessor implements CommandProcessor
{
current = null;
}
}
while (current != null);
if (webScriptServletRequest == null)
} while (current != null);
if (webScriptServletRequest == null)
{
logger.debug("bad request, not assignable from");
resp.setStatus(Status.STATUS_BAD_REQUEST);
return Status.STATUS_BAD_REQUEST;
}

//We can't use the WebScriptRequest version of getParameter, since that may cause the content stream
//to be parsed. Get hold of the raw HttpServletRequest and work with that.

// We can't use the WebScriptRequest version of getParameter, since that may cause the content stream
// to be parsed. Get hold of the raw HttpServletRequest and work with that.
HttpServletRequest servletRequest = webScriptServletRequest.getHttpServletRequest();

//Read the transfer id from the request

// Read the transfer id from the request
String transferId = servletRequest.getParameter("transferId");

if ((transferId == null) || !JakartaServletFileUpload.isMultipartContent(servletRequest))
{
logger.debug("bad request, not multipart");
resp.setStatus(Status.STATUS_BAD_REQUEST);
return Status.STATUS_BAD_REQUEST;
}

try

try
{
logger.debug("about to upload manifest file");

JakartaServletFileUpload upload = new JakartaServletFileUpload();
FileItemInputIterator iter = upload.getItemIterator(servletRequest);
while (iter.hasNext())
while (iter.hasNext())
{
FileItemInput item = iter.next();
if (!item.isFormField() && TransferCommons.PART_NAME_MANIFEST.equals(item.getFieldName()))
if (!item.isFormField() && TransferCommons.PART_NAME_MANIFEST.equals(item.getFieldName()))
{
logger.debug("got manifest file");
receiver.saveSnapshot(transferId, item.getInputStream());
}
}

logger.debug("success");
resp.setStatus(Status.STATUS_OK);

@@ -133,10 +132,10 @@ public class PostSnapshotCommandProcessor implements CommandProcessor
receiver.generateRequsite(transferId, out);
}
}
catch (Exception ex)
catch (Exception ex)
{
logger.debug("exception caught", ex);
if(transferId != null)
if (transferId != null)
{
logger.debug("ending transfer", ex);
receiver.end(transferId);
@@ -151,7 +150,8 @@ public class PostSnapshotCommandProcessor implements CommandProcessor
}

/**
* @param receiver the receiver to set
* @param receiver
* the receiver to set
*/
public void setReceiver(TransferReceiver receiver)
{
File diff suppressed because it is too large
@@ -2,23 +2,23 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
@@ -38,11 +38,19 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.extensions.surf.util.I18NUtil;

import org.alfresco.model.ContentModel;
import org.alfresco.query.PagingRequest;
import org.alfresco.repo.security.permissions.AccessDeniedException;
import org.alfresco.repo.site.SiteModel;
import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.People;
@@ -77,8 +85,6 @@ import org.alfresco.util.AlfrescoCollator;
import org.alfresco.util.ISO9075;
import org.alfresco.util.ParameterCheck;
import org.alfresco.util.SearchLanguageConversion;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.extensions.surf.util.I18NUtil;

/**
* Queries implementation
@@ -88,43 +94,43 @@ import org.springframework.extensions.surf.util.I18NUtil;
*/
public class QueriesImpl implements Queries, InitializingBean
{
private final static Map<String,QName> NODE_SORT_PARAMS_TO_QNAMES = sortParamsToQNames(
PARAM_NAME, ContentModel.PROP_NAME,
PARAM_CREATEDAT, ContentModel.PROP_CREATED,
PARAM_MODIFIEDAT, ContentModel.PROP_MODIFIED);
private static final Log LOGGER = LogFactory.getLog(QueriesImpl.class);
private final static Map<String, QName> NODE_SORT_PARAMS_TO_QNAMES = sortParamsToQNames(
PARAM_NAME, ContentModel.PROP_NAME,
PARAM_CREATEDAT, ContentModel.PROP_CREATED,
PARAM_MODIFIEDAT, ContentModel.PROP_MODIFIED);

private final static Map<String, QName> PEOPLE_SORT_PARAMS_TO_QNAMES = sortParamsToQNames(
PARAM_PERSON_ID, ContentModel.PROP_USERNAME,
ContentModel.PROP_FIRSTNAME,
ContentModel.PROP_LASTNAME);
PARAM_PERSON_ID, ContentModel.PROP_USERNAME,
ContentModel.PROP_FIRSTNAME,
ContentModel.PROP_LASTNAME);

private final static Map<String, QName> SITE_SORT_PARAMS_TO_QNAMES = sortParamsToQNames(
PARAM_SITE_ID, ContentModel.PROP_NAME,
PARAM_SITE_TITLE, ContentModel.PROP_TITLE,
PARAM_SITE_ID, ContentModel.PROP_NAME,
PARAM_SITE_TITLE, ContentModel.PROP_TITLE,
PARAM_SITE_DESCRIPTION, ContentModel.PROP_DESCRIPTION);

/**
* Helper method to build a map of sort parameter names to QNames. This method iterates through
* the parameters. If a parameter is a String it is assumed to be a sort parameter name and will
* be followed by a QName to which it maps. If however it is a QName the local name of the OName
* is used as the sort parameter name.
* @param parameters to build up the map.
* Helper method to build a map of sort parameter names to QNames. This method iterates through the parameters. If a parameter is a String it is assumed to be a sort parameter name and will be followed by a QName to which it maps. If however it is a QName the local name of the OName is used as the sort parameter name.
*
* @param parameters
* to build up the map.
* @return the map
*/
private static Map<String, QName> sortParamsToQNames(Object... parameters)
{
Map<String, QName> map = new HashMap<>();
for (int i=0; i<parameters.length; i++)
for (int i = 0; i < parameters.length; i++)
{
map.put(
parameters[i] instanceof String
? (String)parameters[i++]
: ((QName)parameters[i]).getLocalName(),
(QName)parameters[i]);
parameters[i] instanceof String
? (String) parameters[i++]
: ((QName) parameters[i]).getLocalName(),
(QName) parameters[i]);
}
return Collections.unmodifiableMap(map);
}
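Aside, not part of the diff: the varargs convention documented above, a String alias followed by the QName it maps to, or a bare QName contributing its local name, can be illustrated with a small self-contained Java sketch. The FakeQName record below is a stand-in for org.alfresco.service.namespace.QName used only to keep the example runnable; it is not repository code.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SortParamsSketch
{
    // Minimal stand-in for QName, carrying only a local name.
    record FakeQName(String localName) {}

    // Mirrors the pairing logic of sortParamsToQNames in the hunk above.
    static Map<String, FakeQName> sortParamsToNames(Object... parameters)
    {
        Map<String, FakeQName> map = new HashMap<>();
        for (int i = 0; i < parameters.length; i++)
        {
            map.put(
                    parameters[i] instanceof String
                            ? (String) parameters[i++]                 // explicit alias, its QName follows
                            : ((FakeQName) parameters[i]).localName(), // bare QName: local name is the alias
                    (FakeQName) parameters[i]);
        }
        return Collections.unmodifiableMap(map);
    }

    public static void main(String[] args)
    {
        Map<String, FakeQName> m = sortParamsToNames(
                "createdAt", new FakeQName("created"), // "createdAt" -> created
                new FakeQName("firstName"));           // "firstName" -> firstName
        System.out.println(m);
    }
}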
private ServiceRegistry sr;
|
||||
private NodeService nodeService;
|
||||
private NamespaceService namespaceService;
|
||||
@@ -162,7 +168,7 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
ParameterCheck.mandatory("nodes", this.nodes);
|
||||
ParameterCheck.mandatory("people", this.people);
|
||||
ParameterCheck.mandatory("sites", this.sites);
|
||||
|
||||
|
||||
this.nodeService = sr.getNodeService();
|
||||
this.namespaceService = sr.getNamespaceService();
|
||||
this.dictionaryService = sr.getDictionaryService();
|
||||
@@ -173,10 +179,9 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
public CollectionWithPagingInfo<Node> findNodes(Parameters parameters)
|
||||
{
|
||||
SearchService searchService = sr.getSearchService();
|
||||
return new AbstractQuery<Node>(nodeService, searchService)
|
||||
{
|
||||
return new AbstractQuery<Node>(nodeService, searchService) {
|
||||
private final Map<String, UserInfo> mapUserInfo = new HashMap<>(10);
|
||||
|
||||
|
||||
@Override
|
||||
protected void buildQuery(StringBuilder query, String term, SearchParameters sp, String queryTemplateName)
|
||||
{
|
||||
@@ -198,14 +203,14 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
{
|
||||
query.append(")");
|
||||
}
|
||||
|
||||
|
||||
String nodeTypeStr = parameters.getParameter(PARAM_NODE_TYPE);
|
||||
if (nodeTypeStr != null)
|
||||
{
|
||||
QName filterNodeTypeQName = nodes.createQName(nodeTypeStr);
|
||||
if (dictionaryService.getType(filterNodeTypeQName) == null)
|
||||
{
|
||||
throw new InvalidArgumentException("Unknown filter nodeType: "+nodeTypeStr);
|
||||
throw new InvalidArgumentException("Unknown filter nodeType: " + nodeTypeStr);
|
||||
}
|
||||
|
||||
query.append(" AND (+TYPE:\"").append(nodeTypeStr).append(("\")"));
|
||||
@@ -229,7 +234,7 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
Path path = null;
|
||||
try
|
||||
{
|
||||
path = nodeService.getPath(nodeRef);
|
||||
path = nodeService.getPath(nodeRef);
|
||||
}
|
||||
catch (InvalidNodeRefException inre)
|
||||
{
|
||||
@@ -248,7 +253,7 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
{
|
||||
// first request for this namespace prefix, get and cache result
|
||||
Collection<String> prefixes = namespaceService.getPrefixes(qname.getNamespaceURI());
|
||||
prefix = prefixes.size() != 0 ? prefixes.iterator().next() : "";
|
||||
prefix = !prefixes.isEmpty() ? prefixes.iterator().next() : "";
|
||||
cache.put(qname.getNamespaceURI(), prefix);
|
||||
}
|
||||
buf.append('/').append(prefix).append(':').append(ISO9075.encode(qname.getLocalName()));
|
||||
@@ -262,12 +267,6 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Node> newList(int capacity)
|
||||
{
|
||||
return new ArrayList<Node>(capacity);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node convert(NodeRef nodeRef, List<String> includeParam)
|
||||
{
|
||||
@@ -281,18 +280,17 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
term = SearchLanguageConversion.escapeLuceneQuery(term);
|
||||
return term;
|
||||
}
|
||||
|
||||
|
||||
}.find(parameters, PARAM_TERM, MIN_TERM_LENGTH_NODES, "keywords",
|
||||
IN_QUERY_SORT, NODE_SORT_PARAMS_TO_QNAMES,
|
||||
new SortColumn(PARAM_MODIFIEDAT, false));
|
||||
IN_QUERY_SORT, NODE_SORT_PARAMS_TO_QNAMES,
|
||||
new SortColumn(PARAM_MODIFIEDAT, false));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public CollectionWithPagingInfo<Person> findPeople(Parameters parameters)
|
||||
{
|
||||
SearchService searchService = sr.getSearchService();
|
||||
return new AbstractQuery<Person>(nodeService, searchService)
|
||||
{
|
||||
return new AbstractQuery<Person>(nodeService, searchService) {
|
||||
@Override
|
||||
protected void buildQuery(StringBuilder query, String term, SearchParameters sp, String queryTemplateName)
|
||||
{
|
||||
@@ -305,33 +303,25 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
query.append("*\")");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Person> newList(int capacity)
|
||||
{
|
||||
return new ArrayList<Person>(capacity);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Person convert(NodeRef nodeRef, List<String> includeParam)
|
||||
{
|
||||
String personId = (String) nodeService.getProperty(nodeRef, ContentModel.PROP_USERNAME);
|
||||
Person person = people.getPerson(personId);
|
||||
return person;
|
||||
return people.getPerson(personId);
|
||||
}
|
||||
|
||||
|
||||
// TODO Do the sort in the query on day. A comment in the code for the V0 API used for live people
|
||||
// search says adding sort values for this query don't work - tried it and they really don't.
|
||||
// search says adding sort values for this query don't work - tried it and they really don't.
|
||||
}.find(parameters, PARAM_TERM, MIN_TERM_LENGTH_PEOPLE, "_PERSON",
|
||||
POST_QUERY_SORT, PEOPLE_SORT_PARAMS_TO_QNAMES,
|
||||
new SortColumn(PARAM_FIRSTNAME, true), new SortColumn(PARAM_LASTNAME, true));
|
||||
POST_QUERY_SORT, PEOPLE_SORT_PARAMS_TO_QNAMES,
|
||||
new SortColumn(PARAM_FIRSTNAME, true), new SortColumn(PARAM_LASTNAME, true));
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionWithPagingInfo<Site> findSites(Parameters parameters)
|
||||
{
|
||||
SearchService searchService = sr.getSearchService();
|
||||
return new AbstractQuery<Site>(nodeService, searchService)
|
||||
{
|
||||
return new AbstractQuery<Site>(nodeService, searchService) {
|
||||
@Override
|
||||
protected void buildQuery(StringBuilder query, String term, SearchParameters sp, String queryTemplateName)
|
||||
{
|
||||
@@ -343,44 +333,34 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
query.append(term);
|
||||
query.append("*\")");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Site> newList(int capacity)
|
||||
{
|
||||
return new ArrayList<>(capacity);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Site convert(NodeRef nodeRef, List<String> includeParam)
|
||||
{
|
||||
return getSite(siteService.getSite(nodeRef), true);
|
||||
return getSite(siteService.getSite(nodeRef));
|
||||
}
|
||||
|
||||
// note: see also Sites.getSite
|
||||
private Site getSite(SiteInfo siteInfo, boolean includeRole)
|
||||
private Site getSite(SiteInfo siteInfo)
|
||||
{
|
||||
// set the site id to the short name (to deal with case sensitivity issues with using the siteId from the url)
|
||||
String siteId = siteInfo.getShortName();
|
||||
String role = null;
|
||||
if(includeRole)
|
||||
{
|
||||
role = sites.getSiteRole(siteId);
|
||||
}
|
||||
String role = sites.getSiteRole(siteId);
|
||||
return new Site(siteInfo, role);
|
||||
}
|
||||
}.find(parameters, PARAM_TERM, MIN_TERM_LENGTH_SITES, "_SITE", POST_QUERY_SORT, SITE_SORT_PARAMS_TO_QNAMES, new SortColumn(PARAM_SITE_TITLE, true));
|
||||
}
|
||||
|
||||
|
||||
public abstract static class AbstractQuery<T>
|
||||
{
|
||||
public enum Sort
|
||||
{
|
||||
IN_QUERY_SORT, POST_QUERY_SORT
|
||||
}
|
||||
|
||||
|
||||
private final NodeService nodeService;
|
||||
private final SearchService searchService;
|
||||
|
||||
|
||||
public AbstractQuery(NodeService nodeService, SearchService searchService)
|
||||
{
|
||||
this.nodeService = nodeService;
|
||||
@@ -388,14 +368,14 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
}
|
||||
|
||||
public CollectionWithPagingInfo<T> find(Parameters parameters,
|
||||
String termName, int minTermLength, String queryTemplateName,
|
||||
Sort sort, Map<String, QName> sortParamsToQNames, SortColumn... defaultSort)
|
||||
String termName, int minTermLength, String queryTemplateName,
|
||||
Sort sort, Map<String, QName> sortParamsToQNames, SortColumn... defaultSort)
|
||||
{
|
||||
SearchParameters sp = new SearchParameters();
|
||||
sp.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
|
||||
sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
|
||||
sp.setDefaultFieldName(queryTemplateName);
|
||||
|
||||
|
||||
String term = getTerm(parameters, termName, minTermLength);
|
||||
|
||||
StringBuilder query = new StringBuilder();
|
||||
@@ -404,7 +384,7 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
|
||||
Paging paging = parameters.getPaging();
|
||||
PagingRequest pagingRequest = Util.getPagingRequest(paging);
|
||||
|
||||
|
||||
List<SortColumn> defaultSortCols = (defaultSort != null ? Arrays.asList(defaultSort) : Collections.emptyList());
|
||||
if (sort == IN_QUERY_SORT)
|
||||
{
|
||||
@@ -413,36 +393,43 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
sp.setSkipCount(pagingRequest.getSkipCount());
|
||||
sp.setMaxItems(pagingRequest.getMaxItems());
|
||||
}
|
||||
|
||||
|
||||
ResultSet queryResults = null;
|
||||
List<T> collection = null;
|
||||
try
|
||||
{
|
||||
queryResults = searchService.query(sp);
|
||||
|
||||
|
||||
List<NodeRef> nodeRefs = queryResults.getNodeRefs();
|
||||
|
||||
if (sort == POST_QUERY_SORT)
|
||||
{
|
||||
nodeRefs = postQuerySort(parameters, sortParamsToQNames, defaultSortCols, nodeRefs);
|
||||
}
|
||||
|
||||
collection = newList(nodeRefs.size());
|
||||
|
||||
Map<NodeRef, T>
|
||||
|
||||
collection = new LinkedHashMap<>(nodeRefs.size());
|
||||
List<String> includeParam = parameters.getInclude();
|
||||
|
||||
for (NodeRef nodeRef : nodeRefs)
|
||||
{
|
||||
T t = convert(nodeRef, includeParam);
|
||||
collection.add(t);
|
||||
try
|
||||
{
|
||||
T t = convert(nodeRef, includeParam);
|
||||
collection.put(nodeRef, t);
|
||||
}
|
||||
catch (AccessDeniedException ade)
|
||||
{
|
||||
LOGGER.debug("Ignoring search result for nodeRef " + nodeRef + " due to access denied exception", ade);
|
||||
}
|
||||
}
|
||||
|
||||
if (sort == POST_QUERY_SORT)
|
||||
{
|
||||
return listPage(collection, paging);
|
||||
List<T> postQuerySortedCollection = postQuerySort(parameters, sortParamsToQNames, defaultSortCols, collection.keySet())
|
||||
.stream()
|
||||
.map(collection::get)
|
||||
.toList();
|
||||
return listPage(postQuerySortedCollection, paging);
|
||||
}
|
||||
else
|
||||
{
|
||||
return CollectionWithPagingInfo.asPaged(paging, collection, queryResults.hasMore(), Long.valueOf(queryResults.getNumberFound()).intValue());
|
||||
return CollectionWithPagingInfo.asPaged(paging, collection.values(), queryResults.hasMore(), Long.valueOf(queryResults.getNumberFound()).intValue());
|
||||
}
|
||||
}
|
||||
finally
|
||||
@@ -455,40 +442,39 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds up the query and is expected to call {@link SearchParameters#setDefaultFieldName(String)}
|
||||
* and {@link SearchParameters#addQueryTemplate(String, String)}
|
||||
* @param query StringBuilder into which the query should be built.
|
||||
* @param term to be searched for
|
||||
* @param sp SearchParameters
|
||||
* Builds up the query and is expected to call {@link SearchParameters#setDefaultFieldName(String)} and {@link SearchParameters#addQueryTemplate(String, String)}
|
||||
*
|
||||
* @param query
|
||||
* StringBuilder into which the query should be built.
|
||||
* @param term
|
||||
* to be searched for
|
||||
* @param sp
|
||||
* SearchParameters
|
||||
* @param queryTemplateName
|
||||
*/
|
||||
protected abstract void buildQuery(StringBuilder query, String term, SearchParameters sp, String queryTemplateName);
|
||||
|
||||
/**
|
||||
* Returns a list of the correct type.
|
||||
* @param capacity of the list
|
||||
* @return a new list.
|
||||
*/
|
||||
protected abstract List<T> newList(int capacity);
|
||||
|
||||
/**
|
||||
* Converts a nodeRef into the an object of the required type.
|
||||
* @param nodeRef to be converted
|
||||
* @param includeParam additional fields to be included
|
||||
*
|
||||
* @param nodeRef
|
||||
* to be converted
|
||||
* @param includeParam
|
||||
* additional fields to be included
|
||||
* @return the object
|
||||
*/
|
||||
protected abstract T convert(NodeRef nodeRef, List<String> includeParam);
|
||||
|
||||
|
||||
protected String getTerm(Parameters parameters, String termName, int minTermLength)
|
||||
{
|
||||
String term = parameters.getParameter(termName);
|
||||
if (term == null)
|
||||
{
|
||||
throw new InvalidArgumentException("Query '"+termName+"' not specified");
|
||||
throw new InvalidArgumentException("Query '" + termName + "' not specified");
|
||||
}
|
||||
|
||||
|
||||
term = escapeTerm(term);
|
||||
|
||||
|
||||
int cnt = 0;
|
||||
for (int i = 0; i < term.length(); i++)
|
||||
{
|
||||
@@ -505,7 +491,7 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
|
||||
if (cnt < minTermLength)
|
||||
{
|
||||
throw new InvalidArgumentException("Query '"+termName+"' is too short. Must have at least "+minTermLength+" alphanumeric chars");
|
||||
throw new InvalidArgumentException("Query '" + termName + "' is too short. Must have at least " + minTermLength + " alphanumeric chars");
|
||||
}
|
||||
|
||||
return term;
|
||||
@@ -524,12 +510,12 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
term = SearchLanguageConversion.escapeLuceneQuery(term);
|
||||
return term;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Adds sort order to the SearchParameters.
|
||||
*/
|
||||
protected void addSortOrder(Parameters parameters, Map<String, QName> sortParamsToQNames,
|
||||
List<SortColumn> defaultSortCols, SearchParameters sp)
|
||||
List<SortColumn> defaultSortCols, SearchParameters sp)
|
||||
{
|
||||
List<SortColumn> sortCols = getSorting(parameters, defaultSortCols);
|
||||
for (SortColumn sortCol : sortCols)
|
||||
@@ -537,16 +523,16 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
QName sortPropQName = sortParamsToQNames.get(sortCol.column);
|
||||
if (sortPropQName == null)
|
||||
{
|
||||
throw new InvalidArgumentException("Invalid sort field: "+sortCol.column);
|
||||
throw new InvalidArgumentException("Invalid sort field: " + sortCol.column);
|
||||
}
|
||||
sp.addSort("@" + sortPropQName, sortCol.asc);
|
||||
sp.addSort("@" + sortPropQName, sortCol.asc);
|
||||
}
|
||||
}
|
||||
|
||||
private List<SortColumn> getSorting(Parameters parameters, List<SortColumn> defaultSortCols)
|
||||
{
|
||||
List<SortColumn> sortCols = parameters.getSorting();
|
||||
if (sortCols == null || sortCols.size() == 0)
|
||||
if (sortCols == null || sortCols.isEmpty())
|
||||
{
|
||||
sortCols = defaultSortCols == null ? Collections.emptyList() : defaultSortCols;
|
||||
}
|
||||
@@ -554,63 +540,66 @@ public class QueriesImpl implements Queries, InitializingBean
|
||||
}
|
||||
|
||||
protected List<NodeRef> postQuerySort(Parameters parameters, Map<String, QName> sortParamsToQNames,
|
||||
List<SortColumn> defaultSortCols, List<NodeRef> nodeRefs)
|
||||
List<SortColumn> defaultSortCols, Set<NodeRef> unsortedNodeRefs)
|
||||
{
|
||||
final List<SortColumn> sortCols = getSorting(parameters, defaultSortCols);
|
||||
int sortColCount = sortCols.size();
|
||||
if (sortColCount > 0)
|
||||
{
|
||||
// make copy of nodeRefs because it can be unmodifiable list.
|
||||
nodeRefs = new ArrayList<NodeRef>(nodeRefs);
|
||||
|
||||
List<QName> sortPropQNames = new ArrayList<>(sortColCount);
|
||||
for (SortColumn sortCol : sortCols)
|
||||
{
|
||||
QName sortPropQName = sortParamsToQNames.get(sortCol.column);
|
||||
if (sortPropQName == null)
|
||||
{
|
||||
throw new InvalidArgumentException("Invalid sort field: "+sortCol.column);
|
||||
}
|
||||
sortPropQNames.add(sortPropQName);
|
||||
}
|
||||
|
||||
final Collator col = AlfrescoCollator.getInstance(I18NUtil.getLocale());
|
||||
Collections.sort(nodeRefs, new Comparator<NodeRef>()
|
||||
{
|
||||
@Override
|
||||
public int compare(NodeRef n1, NodeRef n2)
|
||||
{
|
||||
int result = 0;
|
||||
for (int i=0; i<sortCols.size(); i++)
|
||||
{
|
||||
SortColumn sortCol = sortCols.get(i);
|
||||
QName sortPropQName = sortPropQNames.get(i);
|
||||
|
||||
Serializable p1 = getProperty(n1, sortPropQName);
|
||||
Serializable p2 = getProperty(n2, sortPropQName);
|
||||
|
||||
result = ((p1 instanceof Long) && (p2 instanceof Long)
|
||||
? Long.compare((Long)p1, (Long)p2)
|
||||
if (sortColCount == 0)
|
||||
{
|
||||
return new ArrayList<>(unsortedNodeRefs);
|
||||
}
|
||||
|
||||
// make copy of nodeRefs because it can be unmodifiable list.
|
||||
List<NodeRef> sortedNodeRefs = new ArrayList<>(unsortedNodeRefs);
|
||||
|
||||
List<QName> sortPropQNames = new ArrayList<>(sortColCount);
|
||||
for (SortColumn sortCol : sortCols)
|
||||
{
|
||||
QName sortPropQName = sortParamsToQNames.get(sortCol.column);
|
||||
if (sortPropQName == null)
|
||||
{
|
||||
throw new InvalidArgumentException("Invalid sort field: " + sortCol.column);
|
||||
}
|
||||
sortPropQNames.add(sortPropQName);
|
||||
}
|
||||
|
||||
final Collator col = AlfrescoCollator.getInstance(I18NUtil.getLocale());
|
||||
Collections.sort(sortedNodeRefs, new Comparator<NodeRef>() {
|
||||
@Override
|
||||
public int compare(NodeRef n1, NodeRef n2)
|
||||
{
|
||||
int result = 0;
|
||||
for (int i = 0; i < sortCols.size(); i++)
|
||||
{
|
||||
SortColumn sortCol = sortCols.get(i);
|
||||
QName sortPropQName = sortPropQNames.get(i);
|
||||
|
||||
Serializable p1 = getProperty(n1, sortPropQName);
|
||||
Serializable p2 = getProperty(n2, sortPropQName);
|
||||
|
||||
result = ((p1 instanceof Long) && (p2 instanceof Long)
|
||||
? Long.compare((Long) p1, (Long) p2)
|
||||
: col.compare(p1.toString(), p2.toString()))
|
||||
* (sortCol.asc ? 1 : -1);
|
||||
|
||||
if (result != 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
if (result != 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Serializable getProperty(NodeRef nodeRef, QName sortPropQName)
|
||||
{
|
||||
Serializable result = nodeService.getProperty(nodeRef, sortPropQName);
|
||||
return result == null ? "" : result;
|
||||
}
|
||||
private Serializable getProperty(NodeRef nodeRef, QName sortPropQName)
|
||||
{
|
||||
Serializable result = nodeService.getProperty(nodeRef, sortPropQName);
|
||||
return result == null ? "" : result;
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
return nodeRefs;
|
||||
});
|
||||
|
||||
return sortedNodeRefs;
|
||||
}
|
||||
|
||||
// note: see also AbstractNodeRelation
|
||||
@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -39,6 +39,9 @@ import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.collections.MapUtils;
import org.apache.logging.log4j.util.Strings;

import org.alfresco.repo.action.executer.CheckOutActionExecuter;
import org.alfresco.repo.action.executer.CopyActionExecuter;
import org.alfresco.repo.action.executer.ImageTransformActionExecuter;
@@ -58,8 +61,6 @@ import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.apache.commons.collections.MapUtils;
import org.apache.logging.log4j.util.Strings;

/**
* This class provides logic for validation of permissions for action parameters which reference node.
@@ -67,15 +68,14 @@ import org.apache.logging.log4j.util.Strings;
public class ActionNodeParameterValidator implements ActionValidator
{
/**
* This list holds action parameter names which require only READ permission on a referenced node
* That means, all other parameters that reference nodes will require WRITE permission
* This list holds action parameter names which require only READ permission on a referenced node That means, all other parameters that reference nodes will require WRITE permission
*/
static final Map<String, List<String>> REQUIRE_READ_PERMISSION_PARAMS =
Map.of(LinkCategoryActionExecuter.NAME, List.of(LinkCategoryActionExecuter.PARAM_CATEGORY_VALUE));
static final Map<String, List<String>> REQUIRE_READ_PERMISSION_PARAMS = Map.of(LinkCategoryActionExecuter.NAME, List.of(LinkCategoryActionExecuter.PARAM_CATEGORY_VALUE));

static final String NO_PROPER_PERMISSIONS_FOR_NODE = "No proper permissions for node: ";
static final String NOT_A_CATEGORY = "Node is not a category ";
static final String NOT_A_FOLDER = "Node is not a folder ";
static final String NO_LONGER_EXISTS = "%s having Id: %s no longer exists. Please update your rule definition.";

private final Actions actions;
private final NamespaceService namespaceService;
@@ -83,7 +83,7 @@ public class ActionNodeParameterValidator implements ActionValidator
private final PermissionService permissionService;

public ActionNodeParameterValidator(Actions actions, NamespaceService namespaceService, Nodes nodes,
PermissionService permissionService)
PermissionService permissionService)
{
this.actions = actions;
this.namespaceService = namespaceService;
@@ -94,7 +94,8 @@ public class ActionNodeParameterValidator implements ActionValidator
/**
* Validates action parameters that reference nodes against access permissions for executing user.
*
* @param action Action to be validated
* @param action
* Action to be validated
*/
@Override
public void validate(Action action)
@@ -124,7 +125,7 @@ public class ActionNodeParameterValidator implements ActionValidator
}

private void validateNodes(final List<ActionDefinition.ParameterDefinition> nodeRefParamDefinitions,
final Action action)
final Action action)
{
if (MapUtils.isNotEmpty(action.getParams()))
{
@@ -132,7 +133,15 @@ public class ActionNodeParameterValidator implements ActionValidator
.filter(pd -> action.getParams().containsKey(pd.getName()))
.forEach(p -> {
final String nodeId = Objects.toString(action.getParams().get(p.getName()), Strings.EMPTY);
final NodeRef nodeRef = nodes.validateNode(nodeId);
NodeRef nodeRef;
try
{
nodeRef = nodes.validateNode(nodeId);
}
catch (EntityNotFoundException e)
{
throw new EntityNotFoundException(String.format(NO_LONGER_EXISTS, p.getDisplayLabel(), nodeId), e);
}
validatePermission(action.getActionDefinitionId(), p.getName(), nodeRef);
validateType(action.getActionDefinitionId(), nodeRef);
});
@@ -163,7 +172,8 @@ public class ActionNodeParameterValidator implements ActionValidator
{
throw new InvalidArgumentException(NOT_A_FOLDER + nodeRef.getId());
}
} else if (!nodes.nodeMatches(nodeRef, Set.of(TYPE_CATEGORY), Collections.emptySet()))
}
else if (!nodes.nodeMatches(nodeRef, Set.of(TYPE_CATEGORY), Collections.emptySet()))
{
throw new InvalidArgumentException(NOT_A_CATEGORY + nodeRef.getId());
}
@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin Console
admin-console.help=Help
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Successfully saved values.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Konzole pro spr\u00e1vce
admin-console.help=N\u00e1pov\u011bda
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Hodnoty byly \u00fasp\u011b\u0161n\u011b ulo\u017eeny.

admin-console.host=Hostitel

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Administrationskonsol
admin-console.help=Hj\u00e6lp
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=V\u00e6rdierne blev gemt.

admin-console.host=V\u00e6rt

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Administratorkonsole
admin-console.help=Hilfe
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Erfolgreich gespeicherte Werte.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Consola de administraci\u00f3n
admin-console.help=Ayuda
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Valores guardados correctamente.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Hallintakonsoli
admin-console.help=Ohje
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Arvot tallennettiin.

admin-console.host=Is\u00e4nt\u00e4

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console d'administration
admin-console.help=Aide
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Les valeurs ont bien \u00e9t\u00e9 enregistr\u00e9es.

admin-console.host=H\u00f4te

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console di amministrazione
admin-console.help=Aiuto
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=I valori sono stati salvati.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u7ba1\u7406\u30b3\u30f3\u30bd\u30fc\u30eb
admin-console.help=\u30d8\u30eb\u30d7
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=\u5024\u3092\u6b63\u5e38\u306b\u4fdd\u5b58\u3057\u307e\u3057\u305f\u3002

admin-console.host=\u30db\u30b9\u30c8

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin-konsoll
admin-console.help=Hjelp
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Verdier som ble lagret.

admin-console.host=Vert

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Beheerconsole
admin-console.help=Help
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Waarden zijn opgeslagen.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Konsola administracyjna
admin-console.help=Pomoc
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Warto\u015bci zosta\u0142y zapisane pomy\u015blnie.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Console de administra\u00e7\u00e3o
admin-console.help=Ajuda
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=Valores salvos com sucesso.

admin-console.host=Host

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u041a\u043e\u043d\u0441\u043e\u043b\u044c \u0430\u0434\u043c\u0438\u043d\u0438\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u0430
admin-console.help=\u0421\u043f\u0440\u0430\u0432\u043a\u0430
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=\u0423\u0441\u043f\u0435\u0448\u043d\u043e \u0441\u043e\u0445\u0440\u0430\u043d\u0435\u043d\u043d\u044b\u0435 \u0437\u043d\u0430\u0447\u0435\u043d\u0438\u044f.

admin-console.host=\u0425\u043e\u0441\u0442

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=Admin-konsol
admin-console.help=Hj\u00e4lp
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=V\u00e4rden sparades.

admin-console.host=V\u00e4rd

@@ -1,7 +1,7 @@
# I18N messages for the Repository Admin Console
admin-console.header=\u7ba1\u7406\u63a7\u5236\u53f0
admin-console.help=\u5e2e\u52a9
admin-console.help-link=http://docs.alfresco.com/{0}/concepts/ch-administering.html
admin-console.help-link=https://support.hyland.com/p/alfresco
admin-console.success=\u5df2\u6210\u529f\u4fdd\u5b58\u7684\u503c\u3002

admin-console.host=\u4e3b\u673a
@@ -5,4 +5,4 @@
<authentication>guest</authentication>
<transaction allow="readonly">required</transaction>
<lifecycle>internal</lifecycle>
</webscript>
</webscript>
@@ -12,9 +12,9 @@
<#macro page title readonly=false controller=DEFAULT_CONTROLLER!"/admin" params="" dialog=false>
<#assign FORM_ID="admin-jmx-form" />
<#if server.edition == "Community">
<#assign docsEdition = "community" />
<#assign docsEdition = "/Alfresco-Content-Services-Community-Edition/" + server.getVersionMajor() + "." + server.getVersionMinor() + "/Alfresco-Content-Services-Community-Edition" />
<#elseif server.edition == "Enterprise" >
<#assign docsEdition = server.getVersionMajor() + "." + server.getVersionMinor() />
<#assign docsEdition = "/Alfresco-Content-Services/" + server.getVersionMajor() + "." + server.getVersionMinor() + "/Alfresco-Content-Services" />
</#if>
<#if metadata??>
<#assign HOSTNAME>${msg("admin-console.host")}: ${metadata.hostname}</#assign>
@@ -551,7 +551,7 @@ Admin.addEventListener(window, 'load', function() {
Template for a full page view
-->
<div class="sticky-wrapper">

<div class="header">
<span><a href="${url.serviceContext}${DEFAULT_CONTROLLER!"/admin"}">${msg("admin-console.header")}</a></span><#if metadata??><span class="meta">${HOSTNAME}</span><span class="meta">${HOSTADDR}</span></#if>
<div style="float:right"><a href="${msg("admin-console.help-link", docsEdition)}" target="_blank">${msg("admin-console.help")}</a></div>
@@ -908,4 +908,4 @@ Admin.addEventListener(window, 'load', function() {
<#macro button label description="" onclick="" style="" id="" class="" disabled="false">
<input class="<#if class?has_content>${class?html}<#else>inline</#if>" <#if id?has_content>id="${id?html}"</#if> <#if style?has_content>style="${style?html}"</#if> type="button" value="${label?html}" onclick="${onclick?html}" <#if disabled="true">disabled="true"</#if> />
<#if description?has_content><span class="description">${description?html}</span></#if>
</#macro>
</#macro>
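Note on the help-link change above: the template still passes docsEdition to the message lookup even though the new bundle value has no {0} placeholder. Message bundles of this kind are typically resolved with java.text.MessageFormat-style substitution, under which surplus arguments are simply ignored, so the call stays backwards compatible with both the old and new patterns. The snippet below is an illustrative sketch of that behaviour, not Alfresco code.

import java.text.MessageFormat;

public class HelpLinkExample
{
    public static void main(String[] args)
    {
        String docsEdition = "/Alfresco-Content-Services/23.4/Alfresco-Content-Services";

        // Old-style pattern: the argument is substituted into the {0} placeholder.
        String oldLink = MessageFormat.format(
                "http://docs.alfresco.com/{0}/concepts/ch-administering.html", docsEdition);

        // New-style pattern: no placeholder, so the surplus argument is ignored.
        String newLink = MessageFormat.format(
                "https://support.hyland.com/p/alfresco", docsEdition);

        System.out.println(oldLink);
        System.out.println(newLink);
    }
}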
@@ -40,6 +40,7 @@
"items":
[
<#list results as row>
<#if row.item.hasPermission("Read")>
{
"type": "${row.item.typeShort}",
"parentType": "${row.item.parentTypeShort!""}",
@@ -75,6 +76,7 @@
"nodeRef": "${row.item.nodeRef}"<#if row.selectable?exists>,
"selectable" : ${row.selectable?string}</#if>
}<#if row_has_next>,</#if>
</#if>
</#list>
]
}
@@ -214,9 +214,13 @@
<property name="authenticationListener" ref="webScriptAuthenticationListener"/>
<property name="remoteUserMapper" ref="RemoteUserMapper" />
<property name="adminConsoleAuthenticator" ref="AdminConsoleAuthenticator" />
<property name="webScriptsHomeAuthenticator" ref="WebScriptsHomeAuthenticator" />
<property name="alwaysAllowBasicAuthForAdminConsole">
<value>${authentication.alwaysAllowBasicAuthForAdminConsole.enabled}</value>
</property>
<property name="alwaysAllowBasicAuthForWebScriptsHome">
<value>${authentication.alwaysAllowBasicAuthForWebScriptsHome.enabled}</value>
</property>
<property name="getRemoteUserTimeoutMilliseconds">
<value>${authentication.getRemoteUserTimeoutMilliseconds}</value>
</property>
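For orientation, the new alwaysAllowBasicAuthForWebScriptsHome and getRemoteUserTimeoutMilliseconds entries are plain Spring setter-injected values. The following is only a hypothetical sketch of the bean shape that XML implies; the class and field names are assumptions, not the actual Alfresco implementation.

// Hypothetical sketch of the setter-injection shape implied by the XML above.
public class RemoteUserAuthenticatorConfig
{
    private boolean alwaysAllowBasicAuthForAdminConsole;
    private boolean alwaysAllowBasicAuthForWebScriptsHome;
    private long getRemoteUserTimeoutMilliseconds;

    public void setAlwaysAllowBasicAuthForAdminConsole(boolean value)
    {
        this.alwaysAllowBasicAuthForAdminConsole = value;
    }

    public void setAlwaysAllowBasicAuthForWebScriptsHome(boolean value)
    {
        this.alwaysAllowBasicAuthForWebScriptsHome = value;
    }

    // Spring maps the property name "getRemoteUserTimeoutMilliseconds" to this setter.
    public void setGetRemoteUserTimeoutMilliseconds(long value)
    {
        this.getRemoteUserTimeoutMilliseconds = value;
    }
}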
@@ -25,6 +25,20 @@
|
||||
*/
|
||||
package org.alfresco.rest.api.tests;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.rest.AbstractSingleNetworkSiteTest;
|
||||
import org.alfresco.rest.api.Queries;
|
||||
@@ -36,27 +50,12 @@ import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.search.SearchParameters;
|
||||
import org.alfresco.service.cmr.site.SiteService;
|
||||
import org.alfresco.service.cmr.site.SiteVisibility;
|
||||
import org.alfresco.util.testing.category.LuceneTests;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
/**
|
||||
* V1 REST API tests for pre-defined 'live' search Queries on Sites
|
||||
* V1 REST API tests for pre-defined 'live' search Queries on Sites
|
||||
*
|
||||
* <ul>
|
||||
* <li> {@literal <host>:<port>/alfresco/api/<networkId>/public/alfresco/versions/1/queries/sites} </li>
|
||||
* <li>{@literal <host>:<port>/alfresco/api/<networkId>/public/alfresco/versions/1/queries/sites}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author janv
|
||||
@@ -64,7 +63,7 @@ import static org.mockito.Mockito.verify;
|
||||
public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
{
|
||||
private static final String URL_QUERIES_LSS = "queries/sites";
|
||||
|
||||
|
||||
private SiteService siteService;
|
||||
|
||||
@Before
|
||||
@@ -72,7 +71,7 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
public void setup() throws Exception
|
||||
{
|
||||
super.setup();
|
||||
siteService = (SiteService)applicationContext.getBean("SiteService");
|
||||
siteService = (SiteService) applicationContext.getBean("SiteService");
|
||||
}
|
||||
|
||||
// Note expectedIds defaults to ids
|
||||
@@ -86,7 +85,7 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
}
|
||||
|
||||
dummySearchServiceQueryNodeRefs.clear();
|
||||
for (String id: ids)
|
||||
for (String id : ids)
|
||||
{
|
||||
NodeRef nodeRef = getNodeRef(id);
|
||||
dummySearchServiceQueryNodeRefs.add(nodeRef);
|
||||
@@ -98,7 +97,7 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
if (expectedStatus == 200)
|
||||
{
|
||||
String termWithEscapedAsterisks = term.replaceAll("\\*", "\\\\*");
|
||||
String expectedQuery = "TYPE:\"{http://www.alfresco.org/model/site/1.0}site\" AND (\"*"+ termWithEscapedAsterisks +"*\")";
|
||||
String expectedQuery = "TYPE:\"{http://www.alfresco.org/model/site/1.0}site\" AND (\"*" + termWithEscapedAsterisks + "*\")";
|
||||
ArgumentCaptor<SearchParameters> searchParametersCaptor = ArgumentCaptor.forClass(SearchParameters.class);
|
||||
verify(mockSearchService, times(++callCountToMockSearchService)).query(searchParametersCaptor.capture());
|
||||
SearchParameters parameters = searchParametersCaptor.getValue();
|
||||
@@ -109,30 +108,32 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
|
||||
if (orderBy != null)
|
||||
{
|
||||
for (int i=0; i<expectedIds.length; i++)
|
||||
for (int i = 0; i < expectedIds.length; i++)
|
||||
{
|
||||
String id = expectedIds[i];
|
||||
String actualId = sites.get(i).getId();
|
||||
assertEquals("Order "+i+":", id, actualId);
|
||||
assertEquals("Order " + i + ":", id, actualId);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Tests basic api for nodes live search sites - metadata (id, title, description)
|
||||
*
|
||||
* <p>GET:</p>
|
||||
* <p>
|
||||
* GET:
|
||||
* </p>
|
||||
* {@literal <host>:<port>/alfresco/api/<networkId>/public/alfresco/versions/1/queries/sites}
|
||||
*/
|
||||
@Test
|
||||
public void testLiveSearchSites() throws Exception
|
||||
{
|
||||
setRequestContext(user1);
|
||||
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(user1);
|
||||
int sCount = 5;
|
||||
assertTrue(sCount > 4); // as relied on by test below
|
||||
|
||||
|
||||
List<String> siteIds = new ArrayList<>(sCount);
|
||||
|
||||
try
|
||||
@@ -149,14 +150,14 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
String siteD = "siteD";
|
||||
|
||||
int charValue = siteI.charAt(0);
|
||||
|
||||
|
||||
// create some some sites with site id: ab00001, abc00002, abcd00003, abcde00004, abcdef00005 (and some specific titles and descriptions)
|
||||
for (int i = 1; i <= sCount; i++)
|
||||
{
|
||||
String num = String.format("%05d", i);
|
||||
|
||||
charValue = charValue+1;
|
||||
siteI = siteI + String.valueOf((char)charValue);
|
||||
charValue = charValue + 1;
|
||||
siteI = siteI + String.valueOf((char) charValue);
|
||||
|
||||
String siteId = siteI + num + RUNID;
|
||||
String siteTitle = siteT + num + siteT;
|
||||
@@ -220,7 +221,7 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
{
|
||||
// some cleanup
|
||||
setRequestContext(user1);
|
||||
|
||||
|
||||
for (String siteId : siteIds)
|
||||
{
|
||||
deleteSite(siteId, true, 204);
|
||||
@@ -230,14 +231,20 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
|
||||
private NodeRef getNodeRef(String createdSiteId)
|
||||
{
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(user1);
|
||||
// Created sites do not return NodeRefs to the caller so we need to get the NodeRef from the siteService.
|
||||
// Temporarily as admin we will get NodeRefs to handle ACL authorization.
|
||||
String userUnderTest = AuthenticationUtil.getFullyAuthenticatedUser();
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(DEFAULT_ADMIN);
|
||||
|
||||
// The following call to siteService.getSite(createdSiteId).getNodeRef() returns a NodeRef like:
|
||||
// workspace://SpacesStore/9db76769-96de-4de4-bdb4-a127130af362
|
||||
// workspace://SpacesStore/9db76769-96de-4de4-bdb4-a127130af362
|
||||
// We call tenantService.getName(nodeRef) to get a fully qualified NodeRef as Solr returns this.
|
||||
// They look like:
|
||||
// workspace://@org.alfresco.rest.api.tests.queriespeopleapitest@SpacesStore/9db76769-96de-4de4-bdb4-a127130af362
|
||||
// workspace://@org.alfresco.rest.api.tests.queriespeopleapitest@SpacesStore/9db76769-96de-4de4-bdb4-a127130af362
|
||||
NodeRef nodeRef = siteService.getSite(createdSiteId).getNodeRef();
|
||||
nodeRef = tenantService.getName(nodeRef);
|
||||
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(userUnderTest);
|
||||
return nodeRef;
|
||||
}
|
||||
|
||||
@@ -245,7 +252,7 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
public void testLiveSearchSites_SortPage() throws Exception
|
||||
{
|
||||
setRequestContext(user1);
|
||||
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(user1);
|
||||
List<String> siteIds = new ArrayList<>(5);
|
||||
|
||||
try
|
||||
@@ -253,14 +260,14 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
// As user 1 ...
|
||||
|
||||
Paging paging = getPaging(0, 100);
|
||||
|
||||
|
||||
// create site
|
||||
String s1 = createSite("siABCDEF"+RUNID, "ABCDEF DEF", "sdABCDEF", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s2 = createSite("siABCD"+RUNID, "ABCD DEF", "sdABCD", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s3 = createSite("siABCDE"+RUNID, "ABCDE DEF", "sdABCDE", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s4 = createSite("siAB"+RUNID, "AB DEF", "sdAB", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s5 = createSite("siABC"+RUNID, "ABC DEF", "sdABC", SiteVisibility.PRIVATE, 201).getId();
|
||||
|
||||
String s1 = createSite("siABCDEF" + RUNID, "ABCDEF DEF", "sdABCDEF", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s2 = createSite("siABCD" + RUNID, "ABCD DEF", "sdABCD", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s3 = createSite("siABCDE" + RUNID, "ABCDE DEF", "sdABCDE", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s4 = createSite("siAB" + RUNID, "AB DEF", "sdAB", SiteVisibility.PRIVATE, 201).getId();
|
||||
String s5 = createSite("siABC" + RUNID, "ABC DEF", "sdABC", SiteVisibility.PRIVATE, 201).getId();
|
||||
|
||||
// test sort order
|
||||
{
|
||||
// default sort order - title asc
|
||||
@@ -276,11 +283,11 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
// basic paging tests
|
||||
{
|
||||
// sort order - title desc
|
||||
checkApiCall("siAB", "title desc", getPaging(0, 2), 200, new String[] {s1, s3}, s1, s3, s2, s5, s4);
|
||||
checkApiCall("siAB", "title desc", getPaging(2, 2), 200, new String[] {s2, s5}, s1, s3, s2, s5, s4);
|
||||
checkApiCall("siAB", "title desc", getPaging(4, 2), 200, new String[] {s4}, s1, s3, s2, s5, s4);
|
||||
checkApiCall("siAB", "title desc", getPaging(0, 2), 200, new String[]{s1, s3}, s1, s3, s2, s5, s4);
|
||||
checkApiCall("siAB", "title desc", getPaging(2, 2), 200, new String[]{s2, s5}, s1, s3, s2, s5, s4);
|
||||
checkApiCall("siAB", "title desc", getPaging(4, 2), 200, new String[]{s4}, s1, s3, s2, s5, s4);
|
||||
}
|
||||
|
||||
|
||||
// -ve tests
|
||||
{
|
||||
// -ve test - invalid sort field
|
||||
@@ -304,7 +311,52 @@ public class QueriesSitesApiTest extends AbstractSingleNetworkSiteTest
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* If the search service does not support ACL filtering, then the Queries API should handle the response to exclude private sites and avoid potential unauthorized errors when building the response.
|
||||
*/
|
||||
@Test
|
||||
public void testLiveSearchExcludesPrivateSites() throws Exception
|
||||
{
|
||||
String publicSiteId = null;
|
||||
String privateSiteId = null;
|
||||
try
|
||||
{
|
||||
// given
|
||||
setRequestContext(null, DEFAULT_ADMIN, DEFAULT_ADMIN_PWD);
|
||||
createUser("bartender");
|
||||
|
||||
publicSiteId = createSite("samePrefixPublicSite", "samePrefixPublicSite", "Visible to all users", SiteVisibility.PUBLIC, 201).getId();
|
||||
privateSiteId = createSite("samePrefixPrivateSite", "samePrefixPrivateSite", "Hidden from bartender", SiteVisibility.PRIVATE, 201).getId();
|
||||
|
||||
String[] searchResults = {publicSiteId, privateSiteId};
|
||||
String[] expectedSites = {publicSiteId};
|
||||
|
||||
// when
|
||||
setRequestContext(null, "bartender", "password");
|
||||
AuthenticationUtil.setFullyAuthenticatedUser("bartender");
|
||||
|
||||
// then
|
||||
checkApiCall("samePrefix", null, getPaging(0, 100), 200, expectedSites, searchResults);
|
||||
}
|
||||
finally
|
||||
{
|
||||
// cleanup
|
||||
AuthenticationUtil.setFullyAuthenticatedUser(DEFAULT_ADMIN);
|
||||
setRequestContext(null, DEFAULT_ADMIN, DEFAULT_ADMIN_PWD);
|
||||
if (publicSiteId != null)
|
||||
{
|
||||
deleteSite(publicSiteId, true, 204);
|
||||
}
|
||||
if (privateSiteId != null)
|
||||
{
|
||||
deleteSite(privateSiteId, true, 204);
|
||||
}
|
||||
deleteUser("bartender", null);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getScope()
|
||||
{
|
||||
|
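The test hunks above rely on Mockito's ArgumentCaptor to inspect the SearchParameters handed to the mocked search service. The sketch below shows that verification pattern in a self-contained form, using a generic stand-in service rather than the Alfresco types.

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.junit.Test;
import org.mockito.ArgumentCaptor;

public class ArgumentCaptorSketchTest
{
    /** Minimal stand-in for a service that accepts a query. */
    interface SearchService
    {
        void query(String query);
    }

    @Test
    public void capturesTheQueryPassedToTheMock()
    {
        SearchService mockSearchService = mock(SearchService.class);

        // The code under test would normally build and submit the query.
        mockSearchService.query("TYPE:\"site\" AND (\"*ab*\")");

        // Capture the argument the mock received and assert on it.
        ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
        verify(mockSearchService, times(1)).query(captor.capture());
        assertEquals("TYPE:\"site\" AND (\"*ab*\")", captor.getValue());
    }
}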
@@ -2,7 +2,7 @@
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2023 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -212,7 +212,7 @@ public class ProcessesImplTest extends TestCase implements RecognizedParamsExtra
{
// the tests are always run on PostgreSQL only
// Dialect dialect = (Dialect) applicationContext.getBean("dialect");
// if (dialect instanceof AlfrescoSQLServerDialect)
// if (dialect instanceof SQLServerDialect)
// {
// REPO-1104: we do not run this test on MS SQL server because it will fail
// until the Activiti defect related to REPO-1104 will be fixed
@@ -27,7 +27,7 @@ to integrate with a number of external Authentication providers including
* https://github.com/Alfresco/alfresco-data-model/tree/master/src/main/java/org/alfresco/repo/security/authentication
* License: LGPL
* Issue Tracker Link: https://issues.alfresco.com/jira/issues/?jql=project%3DREPO
* Documentation Link: http://docs.alfresco.com/5.2/concepts/auth-intro.html
* Documentation Link: https://support.hyland.com/r/Alfresco/Alfresco-Content-Services-Community-Edition/23.4/Alfresco-Content-Services-Community-Edition/Administer/Manage-Security/Authentication-and-sync
* Contribution Model: Alfresco Open Source
***
@@ -16,7 +16,7 @@
* Source Code Link: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/
* License: LGPL
* Issue Tracker Link: https://issues.alfresco.com/jira/secure/RapidBoard.jspa?projectKey=REPO&useStoredSettings=true&rapidView=379
* Documentation Link: http://docs.alfresco.com/5.1/concepts/versioning.html
* Documentation Link: https://support.hyland.com/r/Alfresco/Alfresco-Content-Services/23.4/Alfresco-Content-Services/Configure/Repository/About-Versioning
* Contribution Model: Alfresco publishes the source code and will review proposed patch requests
***
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.5.0.6</version>
<version>23.6.0.18</version>
</parent>

<dependencies>
@@ -85,7 +85,7 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-fileupload2-jakarta</artifactId>
<artifactId>commons-fileupload2-jakarta-servlet6</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
@@ -94,7 +94,6 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.17.0</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
@@ -742,10 +741,6 @@
<artifactId>reflections</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
</dependencies>

<build>
@@ -2,93 +2,96 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
|
||||
/**
|
||||
* Add features action executor implementation.
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
{
|
||||
/**
|
||||
* Action constants
|
||||
*/
|
||||
public static final String NAME = "add-features";
|
||||
public static final String PARAM_ASPECT_NAME = "aspect-name";
|
||||
public static final String PARAM_CONSTRAINT = "ac-aspects";
|
||||
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
|
||||
/** Transaction Service, used for retrying operations */
|
||||
private TransactionService transactionService;
|
||||
|
||||
/**
|
||||
* Set the node service
|
||||
*
|
||||
* @param nodeService the node service
|
||||
*/
|
||||
public void setNodeService(NodeService nodeService)
|
||||
{
|
||||
this.nodeService = nodeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the transaction service
|
||||
*
|
||||
* @param transactionService the transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adhoc properties are allowed for this executor
|
||||
*/
|
||||
@Override
|
||||
protected boolean getAdhocPropertiesAllowed()
|
||||
{
|
||||
return true;
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.action.access.ActionAccessRestriction;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
|
||||
/**
|
||||
* Add features action executor implementation.
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
{
|
||||
/**
|
||||
* Action constants
|
||||
*/
|
||||
public static final String NAME = "add-features";
|
||||
public static final String PARAM_ASPECT_NAME = "aspect-name";
|
||||
public static final String PARAM_CONSTRAINT = "ac-aspects";
|
||||
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
|
||||
/** Transaction Service, used for retrying operations */
|
||||
private TransactionService transactionService;
|
||||
|
||||
/**
|
||||
* Set the node service
|
||||
*
|
||||
* @param nodeService
|
||||
* the node service
|
||||
*/
|
||||
public void setNodeService(NodeService nodeService)
|
||||
{
|
||||
this.nodeService = nodeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the transaction service
|
||||
*
|
||||
* @param transactionService
|
||||
* the transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adhoc properties are allowed for this executor
|
||||
*/
|
||||
@Override
|
||||
protected boolean getAdhocPropertiesAllowed()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -96,55 +99,61 @@ public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
*/
|
||||
public void executeImpl(final Action ruleAction, final NodeRef actionedUponNodeRef)
|
||||
{
|
||||
if (this.nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(
|
||||
new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
|
||||
QName aspectQName = null;
|
||||
|
||||
if(! nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
// Node has gone away, skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Build the aspect details
|
||||
Map<String, Serializable> paramValues = ruleAction.getParameterValues();
|
||||
for (Map.Entry<String, Serializable> entry : paramValues.entrySet())
|
||||
{
|
||||
if (entry.getKey().equals(PARAM_ASPECT_NAME) == true)
|
||||
{
|
||||
aspectQName = (QName)entry.getValue();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Must be an adhoc property
|
||||
QName propertyQName = QName.createQName(entry.getKey());
|
||||
Serializable propertyValue = entry.getValue();
|
||||
properties.put(propertyQName, propertyValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the aspect
|
||||
nodeService.addAspect(actionedUponNodeRef, aspectQName, properties);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see org.alfresco.repo.action.ParameterizedItemAbstractBase#addParameterDefinitions(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
protected void addParameterDefinitions(List<ParameterDefinition> paramList)
|
||||
{
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_ASPECT_NAME, DataTypeDefinition.QNAME, true, getParamDisplayLabel(PARAM_ASPECT_NAME), false, "ac-aspects"));
|
||||
}
|
||||
|
||||
}
|
||||
if (this.nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(
|
||||
new RetryingTransactionCallback<Void>() {
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
|
||||
QName aspectQName = null;
|
||||
|
||||
if (!nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
// Node has gone away, skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Build the aspect details
|
||||
Map<String, Serializable> paramValues = ruleAction.getParameterValues();
|
||||
removeActionContextParameter(paramValues);
|
||||
for (Map.Entry<String, Serializable> entry : paramValues.entrySet())
|
||||
{
|
||||
if (entry.getKey().equals(PARAM_ASPECT_NAME) == true)
|
||||
{
|
||||
aspectQName = (QName) entry.getValue();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Must be an adhoc property
|
||||
QName propertyQName = QName.createQName(entry.getKey());
|
||||
Serializable propertyValue = entry.getValue();
|
||||
properties.put(propertyQName, propertyValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the aspect
|
||||
nodeService.addAspect(actionedUponNodeRef, aspectQName, properties);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see org.alfresco.repo.action.ParameterizedItemAbstractBase#addParameterDefinitions(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
protected void addParameterDefinitions(List<ParameterDefinition> paramList)
|
||||
{
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_ASPECT_NAME, DataTypeDefinition.QNAME, true, getParamDisplayLabel(PARAM_ASPECT_NAME), false, "ac-aspects"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove actionContext from the parameter values to declassify as an adhoc property
|
||||
*/
|
||||
private void removeActionContextParameter(Map<String, Serializable> paramValues)
|
||||
{
|
||||
paramValues.remove(ActionAccessRestriction.ACTION_CONTEXT_PARAM_NAME);
|
||||
}
|
||||
}
|
||||
|
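The new removeActionContextParameter step in the hunk above simply strips a reserved key from the action's parameter map before the remaining entries are treated as ad-hoc properties. A small stand-alone sketch of that idea follows; the constant value is illustrative, not the real ActionAccessRestriction key.

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

public class AdhocParameterFilter
{
    /** Illustrative stand-in for ActionAccessRestriction.ACTION_CONTEXT_PARAM_NAME. */
    private static final String ACTION_CONTEXT_PARAM_NAME = "actionContext";

    /** Remove the reserved context key so it is not mistaken for an ad-hoc property. */
    public static void removeActionContextParameter(Map<String, Serializable> paramValues)
    {
        paramValues.remove(ACTION_CONTEXT_PARAM_NAME);
    }

    public static void main(String[] args)
    {
        Map<String, Serializable> params = new HashMap<>();
        params.put("actionContext", "rule");
        params.put("{http://www.alfresco.org/model/content/1.0}title", "My title");

        removeActionContextParameter(params);

        // Only genuine ad-hoc properties remain.
        System.out.println(params.keySet());
    }
}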
@@ -58,6 +58,7 @@ import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.model.FileExistsException;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.model.FolderExistsException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -73,6 +74,7 @@ import org.alfresco.util.TempFileProvider;
*
* @author gavinc
*/
@SuppressWarnings("PMD.PreserveStackTrace")
public class ImporterActionExecuter extends ActionExecuterAbstractBase
{
public static final String NAME = "import";
@@ -366,7 +368,11 @@ public class ImporterActionExecuter extends ActionExecuterAbstractBase
catch (FileExistsException e)
{
// TODO: add failed file info to status message?
throw new AlfrescoRuntimeException("Failed to process ZIP file.", e);
if (e.getType().equalsIgnoreCase("folder"))
{
throw new FolderExistsException(root, file.getName());
}
throw e;
}
}
}
||||
|
@@ -542,10 +542,7 @@ public class EventGenerator extends AbstractLifecycleBean implements Initializin
@Override
protected void onShutdown(ApplicationEvent applicationEvent)
{
if (eventSender != null)
{
eventSender.destroy();
}
// NOOP
}

protected class EventTransactionListener extends TransactionListenerAdapter
||||
|
@@ -52,7 +52,7 @@ public interface EventSender
}

/**
* It's called when the application context is closing, allowing {@link org.alfresco.repo.event2.EventGenerator} to perform cleanup operations.
* It's called when the bean instance is destroyed, allowing cleanup operations to be performed.
*/
default void destroy()
{
|
||||
|
@@ -25,15 +25,16 @@
*/
package org.alfresco.repo.event2;

import java.util.Optional;
import java.util.concurrent.Executor;
import jakarta.annotation.Nonnull;
import org.alfresco.util.PropertyCheck;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.core.env.PropertyResolver;

import java.util.Optional;
import java.util.concurrent.Executor;
import org.alfresco.util.PropertyCheck;

public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
{
@@ -51,7 +52,7 @@ public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
private boolean legacySkipQueueConfig;

public EventSenderFactoryBean(@Autowired PropertyResolver propertyResolver, Event2MessageProducer event2MessageProducer,
Executor enqueueThreadPoolExecutor, Executor dequeueThreadPoolExecutor)
Executor enqueueThreadPoolExecutor, Executor dequeueThreadPoolExecutor)
{
super();
PropertyCheck.mandatory(this, "propertyResolver", propertyResolver);
@@ -155,4 +156,13 @@ public class EventSenderFactoryBean extends AbstractFactoryBean<EventSender>
{
return event2MessageProducer;
}

@Override
protected void destroyInstance(EventSender eventSender)
{
if (eventSender != null)
{
eventSender.destroy();
}
}
}
|
||||
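Moving the cleanup out of EventGenerator.onShutdown and into the factory bean follows Spring's AbstractFactoryBean lifecycle, where destroyInstance is invoked for the singleton the factory created when the context shuts down. The following is a generic, hypothetical sketch of that pattern, not the Alfresco classes.

import org.springframework.beans.factory.config.AbstractFactoryBean;

/** Hypothetical product type with an explicit cleanup hook. */
interface Sender
{
    void send(String message);

    default void destroy()
    {}
}

/** Factory bean that owns the product's lifecycle, mirroring the shape of EventSenderFactoryBean. */
public class SenderFactoryBean extends AbstractFactoryBean<Sender>
{
    @Override
    public Class<?> getObjectType()
    {
        return Sender.class;
    }

    @Override
    protected Sender createInstance()
    {
        return message -> System.out.println("sending " + message);
    }

    @Override
    protected void destroyInstance(Sender sender)
    {
        // Called by Spring when the context is closed, so the product can release resources.
        if (sender != null)
        {
            sender.destroy();
        }
    }
}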
|
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -125,6 +125,9 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
/** Number of (bytecode) instructions that will trigger the observer */
private int observerInstructionCount = 100;

/** Flag to enable or disable scope cleaning at the end of each script execution */
private boolean cleanScope = true;

/** Custom context factory */
public static AlfrescoContextFactory contextFactory;

@@ -209,6 +212,15 @@
}

/**
* @param cleanScope
* true to enable scope cleaning at the end of each script execution - set to false to disable this feature.
*/
public void setCleanScope(boolean cleanScope)
{
this.cleanScope = cleanScope;
}

/**
* @see org.alfresco.service.cmr.repository.ScriptProcessor#reset()
*/
public void reset()
@@ -614,7 +626,7 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
}
finally
{
if (!secure)
if (!secure && cleanScope)
{
unsetScope(model, scope);
}
|
||||
|
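The new cleanScope flag simply gates the existing scope-reset step in the finally block, so the cleanup can be switched off when a deployment relies on state surviving between script executions. The sketch below illustrates that gating pattern only; the class and method names are assumptions.

/** Illustrative sketch of an optional cleanup step gated by a configuration flag. */
public class ScopeCleaner
{
    private boolean cleanScope = true;

    public void setCleanScope(boolean cleanScope)
    {
        this.cleanScope = cleanScope;
    }

    public void executeScript(Runnable script, boolean secure)
    {
        try
        {
            script.run();
        }
        finally
        {
            // Only reset the scope for non-secure scripts, and only when cleaning is enabled.
            if (!secure && cleanScope)
            {
                unsetScope();
            }
        }
    }

    private void unsetScope()
    {
        System.out.println("scope cleaned");
    }
}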
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -37,6 +37,7 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.transform.config.CoreFunction;
import org.alfresco.util.PropertyCheck;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
@@ -46,6 +47,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.transform.common.RequestParamMap.DIRECT_ACCESS_URL;
@@ -68,6 +70,7 @@ public class LocalTransformClient implements TransformClient, InitializingBean
private ContentService contentService;
private RenditionService2Impl renditionService2;
private boolean directAccessUrlEnabled;
private int threadPoolSize;

private ExecutorService executorService;
private ThreadLocal<LocalTransform> transform = new ThreadLocal<>();
@@ -97,6 +100,11 @@
this.directAccessUrlEnabled = directAccessUrlEnabled;
}

public void setThreadPoolSize(int threadPoolSize)
{
this.threadPoolSize = threadPoolSize;
}

public void setExecutorService(ExecutorService executorService)
{
this.executorService = executorService;
@@ -110,9 +118,11 @@
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "renditionService2", renditionService2);
PropertyCheck.mandatory(this, "directAccessUrlEnabled", directAccessUrlEnabled);
PropertyCheck.mandatory(this, "threadPoolSize", threadPoolSize);
if (executorService == null)
{
executorService = Executors.newCachedThreadPool();
ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat("local-transform-%d").build();
executorService = Executors.newFixedThreadPool(threadPoolSize, threadFactory);
}
}
|
||||
|
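The switch above from a cached pool to a fixed-size pool, with Guava's ThreadFactoryBuilder providing named threads, bounds how many local transforms can run at once and makes the pool easy to identify in thread dumps. A small self-contained sketch of the same construction:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class TransformPoolExample
{
    public static void main(String[] args)
    {
        int threadPoolSize = 4;

        // Named threads make the transform pool easy to spot in thread dumps.
        ThreadFactory threadFactory = new ThreadFactoryBuilder()
                .setNameFormat("local-transform-%d")
                .build();

        // A fixed-size pool bounds the number of concurrent transforms.
        ExecutorService executorService = Executors.newFixedThreadPool(threadPoolSize, threadFactory);

        for (int i = 0; i < 8; i++)
        {
            int jobId = i;
            executorService.submit(() -> System.out.println(
                    Thread.currentThread().getName() + " handling transform " + jobId));
        }
        executorService.shutdown();
    }
}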
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -25,35 +25,27 @@
|
||||
*/
|
||||
package org.alfresco.repo.rendition2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.service.NotAuditable;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* The Async Rendition service. Replaces the original rendition services which included synchronous renditions and
|
||||
* asynchronous methods with Java call backs.<p/>
|
||||
* The Async Rendition service. Replaces the original rendition services which included synchronous renditions and asynchronous methods with Java call backs.
|
||||
* <p/>
|
||||
*
|
||||
* Renditions are defined as {@link RenditionDefinition2}s and may be registered and looked by the associated
|
||||
* {@link RenditionDefinitionRegistry2}.<p/>
|
||||
* Renditions are defined as {@link RenditionDefinition2}s and may be registered and looked by the associated {@link RenditionDefinitionRegistry2}.
|
||||
* <p/>
|
||||
*
|
||||
* Unlike the original RenditionService, this service:
|
||||
* <ul>
|
||||
* <li>Performs async renditions without a Java callback, as another node in the cluster may complete the rendition.
|
||||
* The current node requests a transform, but another node might take the resulting transform and turn it into a
|
||||
* rendition if the external Transform Service is used.</li>
|
||||
* <li>Reduces the configurable options to do with with the associations of rendition nodes, their type. They
|
||||
* are identical to 'hidden' (not normally seen as nodes in their own right in a
|
||||
* UI) renditions produced by the original service. So, they are always directly under the source node connected by a
|
||||
* {@code}rn:rendition{@code} association with the name of the rendition.</li>
|
||||
* <li>The rendition nodes additionally have a {@code}rn:rendition2{@code} aspect and a {@code}contentUrlHashCode{@code}
|
||||
* property. This property contains a value that allows the service to work out if it holds a rendition of the
|
||||
* source node's current content.</li>
|
||||
* <li>Failures are handled by setting the rendition node's content to null.</li>
|
||||
* <li>When a rendition is requested via the REST API, only the newer service is used.</li>
|
||||
* <li>Where possible old service renditions migrate automatically over to the new service when content on a
|
||||
* source node is updated.</li>
|
||||
* <li>Performs async renditions without a Java callback, as another node in the cluster may complete the rendition. The current node requests a transform, but another node might take the resulting transform and turn it into a rendition if the external Transform Service is used.</li>
|
||||
* <li>Reduces the configurable options to do with with the associations of rendition nodes, their type. They are identical to 'hidden' (not normally seen as nodes in their own right in a UI) renditions produced by the original service. So, they are always directly under the source node connected by a {@code}rn:rendition{@code} association with the name of the rendition.</li>
|
||||
* <li>The rendition nodes additionally have a {@code}rn:rendition2{@code} aspect and a {@code}contentUrlHashCode{@code} property. This property contains a value that allows the service to work out if it holds a rendition of the source node's current content.</li>
|
||||
* <li>Failures are handled by setting the rendition node's content to null.</li>
|
||||
* <li>When a rendition is requested via the REST API, only the newer service is used.</li>
|
||||
* <li>Where possible old service renditions migrate automatically over to the new service when content on a source node is updated.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author adavis
|
||||
@@ -66,29 +58,30 @@ public interface RenditionService2
|
||||
RenditionDefinitionRegistry2 getRenditionDefinitionRegistry2();
|
||||
|
||||
/**
|
||||
* This method asynchronously transforms content to a target mimetype with transform options supplied in the
|
||||
* {@code transformDefinition}. A response is set on a message queue once the transform is complete or fails,
|
||||
* together with some client supplied data. The response queue and client data are also included in the
|
||||
* transformDefinition.<p>
|
||||
* This method asynchronously transforms content to a target mimetype with transform options supplied in the {@code transformDefinition}. A response is set on a message queue once the transform is complete or fails, together with some client supplied data. The response queue and client data are also included in the transformDefinition.
|
||||
* <p>
|
||||
*
|
||||
* This method does not create a rendition node, but uses the same code as renditions to perform the transform. The
|
||||
* {@code transformDefinition} extends {@link RenditionDefinition2}, but is not stored in a
|
||||
* {@link RenditionDefinitionRegistry2}, as it is transient in nature.
|
||||
* This method does not create a rendition node, but uses the same code as renditions to perform the transform. The {@code transformDefinition} extends {@link RenditionDefinition2}, but is not stored in a {@link RenditionDefinitionRegistry2}, as it is transient in nature.
|
||||
*
|
||||
* @param sourceNodeRef the node from which the content is retrieved.
|
||||
* @param transformDefinition which defines the transform, where to send the response and some client specified data.
|
||||
* @throws UnsupportedOperationException if the transform is not supported.
|
||||
* @param sourceNodeRef
|
||||
* the node from which the content is retrieved.
|
||||
* @param transformDefinition
|
||||
* which defines the transform, where to sent the response and some client specified data.
|
||||
* @throws UnsupportedOperationException
|
||||
* if the transform is not supported.
|
||||
*/
|
||||
@NotAuditable
|
||||
public void transform(NodeRef sourceNodeRef, TransformDefinition transformDefinition);
|
||||
|
||||
/**
|
||||
* This method asynchronously renders content as specified by the {@code renditionName}. The content to be
|
||||
* rendered is provided by {@code sourceNodeRef}.
|
||||
* This method asynchronously renders content as specified by the {@code renditionName}. The content to be rendered is provided by {@code sourceNodeRef}.
|
||||
*
|
||||
* @param sourceNodeRef the node from which the content is retrieved.
|
||||
* @param renditionName the rendition to be performed.
|
||||
* @throws UnsupportedOperationException if the transform is not supported AND the rendition has not been created before.
|
||||
* @param sourceNodeRef
|
||||
* the node from which the content is retrieved.
|
||||
* @param renditionName
|
||||
* the rendition to be performed.
|
||||
* @throws UnsupportedOperationException
|
||||
* if the transform is not supported AND the rendition has not been created before.
|
||||
*/
|
||||
@NotAuditable
|
||||
public void render(NodeRef sourceNodeRef, String renditionName);
|
||||
@@ -104,10 +97,11 @@ public interface RenditionService2
|
||||
/**
|
||||
* This method gets the rendition of the {@code sourceNodeRef} identified by its name.
|
||||
*
|
||||
* @param sourceNodeRef the source node for the renditions
|
||||
* @param renditionName the renditionName used to identify a rendition.
|
||||
* @return the {@link ChildAssociationRef} which links the source node to the
|
||||
* rendition or <code>null</code> if there is no rendition or it is not up to date.
|
||||
* @param sourceNodeRef
|
||||
* the source node for the renditions
|
||||
* @param renditionName
|
||||
* the renditionName used to identify a rendition.
|
||||
* @return the {@link ChildAssociationRef} which links the source node to the rendition or <code>null</code> if there is no rendition or it is not up to date.
|
||||
*/
|
||||
@NotAuditable
|
||||
ChildAssociationRef getRenditionByName(NodeRef sourceNodeRef, String renditionName);
|
||||
@@ -115,7 +109,8 @@ public interface RenditionService2
|
||||
/**
|
||||
* This method clears source nodeRef rendition content and content hash code using supplied rendition name.
|
||||
*
|
||||
* @param renditionNode the rendition node
|
||||
* @param renditionNode
|
||||
* the rendition node
|
||||
*/
|
||||
@NotAuditable
|
||||
void clearRenditionContentDataInTransaction(NodeRef renditionNode);
|
||||
@@ -124,4 +119,13 @@ public interface RenditionService2
|
||||
* Indicates if renditions are enabled. Set using the {@code system.thumbnail.generate} value.
|
||||
*/
|
||||
boolean isEnabled();
|
||||
}
|
||||
|
||||
/**
|
||||
* This method forces the content hash code for every {@code sourceNodeRef} renditions.
|
||||
*
|
||||
* @param sourceNodeRef
|
||||
* the source node to update renditions hash code
|
||||
*/
|
||||
@NotAuditable
|
||||
void forceRenditionsContentHashCode(NodeRef sourceNodeRef);
|
||||
}
|
||||
|
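As the interface javadoc above describes, a rendition is considered current only when the contentUrlHashCode stored on the rendition node matches the hash computed from the source node's present content. The sketch below illustrates that freshness check in isolation; the constants mirror the ones in the implementation diff, but the helper itself is hypothetical.

/** Hypothetical sketch of the contentUrlHashCode freshness check described above. */
public class RenditionFreshness
{
    public static final int SOURCE_HAS_NO_CONTENT = -1;
    public static final int RENDITION2_DOES_NOT_EXIST = -2;

    /** A rendition is up to date only when both hash codes are valid and identical. */
    public static boolean isUpToDate(int sourceContentHashCode, int renditionContentHashCode)
    {
        return sourceContentHashCode != SOURCE_HAS_NO_CONTENT
                && renditionContentHashCode != RENDITION2_DOES_NOT_EXIST
                && sourceContentHashCode == renditionContentHashCode;
    }

    public static void main(String[] args)
    {
        System.out.println(isUpToDate(42, 42)); // true: rendition matches current content
        System.out.println(isUpToDate(42, 7)); // false: source changed since rendering
    }
}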
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -25,6 +25,24 @@
|
||||
*/
|
||||
package org.alfresco.repo.rendition2;
|
||||
|
||||
import static org.alfresco.model.ContentModel.PROP_CONTENT;
|
||||
import static org.alfresco.model.RenditionModel.PROP_RENDITION_CONTENT_HASH_CODE;
|
||||
import static org.alfresco.service.namespace.QName.createQName;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.repo.content.ContentServicePolicies;
|
||||
@@ -51,23 +69,6 @@ import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.namespace.RegexQNamePattern;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.PropertyCheck;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static org.alfresco.model.ContentModel.PROP_CONTENT;
|
||||
import static org.alfresco.model.RenditionModel.PROP_RENDITION_CONTENT_HASH_CODE;
|
||||
import static org.alfresco.service.namespace.QName.createQName;
|
||||
|
||||
/**
|
||||
* The Async Rendition service. Replaces the original deprecated RenditionService.
|
||||
@@ -80,11 +81,19 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
|
||||
public static final QName DEFAULT_RENDITION_CONTENT_PROP = ContentModel.PROP_CONTENT;
|
||||
public static final String DEFAULT_MIMETYPE = MimetypeMap.MIMETYPE_TEXT_PLAIN;
|
||||
public static final String MIMETYPE_METADATA_EXTRACT = "alfresco-metadata-extract";
|
||||
public static final String MIMETYPE_METADATA_EMBED = "alfresco-metadata-embed";
|
||||
public static final String DEFAULT_ENCODING = "UTF-8";
|
||||
|
||||
public static final int SOURCE_HAS_NO_CONTENT = -1;
|
||||
public static final int RENDITION2_DOES_NOT_EXIST = -2;
|
||||
|
||||
// Allowed mimetypes to support text or metadata extract transforms when thumbnails are disabled.
|
||||
private static final Set<String> ALLOWED_MIMETYPES = Set.of(
|
||||
MimetypeMap.MIMETYPE_TEXT_PLAIN,
|
||||
MIMETYPE_METADATA_EXTRACT,
|
||||
MIMETYPE_METADATA_EMBED);
|
||||
|
||||
private static Log logger = LogFactory.getLog(RenditionService2Impl.class);
|
||||
|
||||
// As Async transforms and renditions are so similar, this class provides a way to provide the code that is different.
|
||||
@@ -95,12 +104,10 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
abstract RenditionDefinition2 getRenditionDefinition();
|
||||
|
||||
void handleUnsupported(UnsupportedOperationException e)
|
||||
{
|
||||
}
|
||||
{}
|
||||
|
||||
void throwIllegalStateExceptionIfAlreadyDone(int sourceContentHashCode)
|
||||
{
|
||||
}
|
||||
{}
|
||||
}
|
||||
|
||||
private TransactionService transactionService;
|
||||
@@ -217,8 +224,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
@Override
|
||||
public void transform(NodeRef sourceNodeRef, TransformDefinition transformDefinition)
|
||||
{
|
||||
requestAsyncTransformOrRendition(sourceNodeRef, new RenderOrTransformCallBack()
|
||||
{
|
||||
requestAsyncTransformOrRendition(sourceNodeRef, new RenderOrTransformCallBack() {
|
||||
@Override
|
||||
public String getName()
|
||||
{
|
||||
@@ -237,8 +243,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
@Override
|
||||
public void render(NodeRef sourceNodeRef, String renditionName)
|
||||
{
|
||||
requestAsyncTransformOrRendition(sourceNodeRef, new RenderOrTransformCallBack()
|
||||
{
|
||||
requestAsyncTransformOrRendition(sourceNodeRef, new RenderOrTransformCallBack() {
|
||||
@Override
|
||||
public String getName()
|
||||
{
|
||||
@@ -261,7 +266,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
@Override
|
||||
public void handleUnsupported(UnsupportedOperationException e)
|
||||
{
|
||||
// On the initial request for a rendition throw the exception.
|
||||
// On the initial request for a rendition throw the exception.
|
||||
NodeRef renditionNode = getRenditionNode(sourceNodeRef, renditionName);
|
||||
if (renditionNode == null)
|
||||
{
|
||||
@@ -277,7 +282,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
int renditionContentHashCode = getRenditionContentHashCode(renditionNode);
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(getName() + ": Source " + sourceContentHashCode + " rendition " + renditionContentHashCode+ " hashCodes");
|
||||
logger.debug(getName() + ": Source " + sourceContentHashCode + " rendition " + renditionContentHashCode + " hashCodes");
|
||||
}
|
||||
if (renditionContentHashCode == sourceContentHashCode)
|
||||
{
|
||||
@@ -291,7 +296,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
{
|
||||
try
|
||||
{
|
||||
if (!isEnabled())
|
||||
if (!isAsyncAllowed(renderOrTransform))
|
||||
{
|
||||
throw new RenditionService2Exception("Async transforms and renditions are disabled " +
|
||||
"(system.thumbnail.generate=false or renditionService2.enabled=false).");
|
||||
@@ -299,14 +304,14 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
|
||||
if (!nodeService.exists(sourceNodeRef))
|
||||
{
|
||||
throw new IllegalArgumentException(renderOrTransform.getName()+ ": The supplied sourceNodeRef "+sourceNodeRef+" does not exist.");
|
||||
throw new IllegalArgumentException(renderOrTransform.getName() + ": The supplied sourceNodeRef " + sourceNodeRef + " does not exist.");
|
||||
}
|
||||
|
||||
RenditionDefinition2 renditionDefinition = renderOrTransform.getRenditionDefinition();
|
||||
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(renderOrTransform.getName()+ ": transform " +sourceNodeRef);
|
||||
logger.debug(renderOrTransform.getName() + ": transform " + sourceNodeRef);
|
||||
}
|
||||
|
||||
AtomicBoolean supported = new AtomicBoolean(true);
|
||||
@@ -328,14 +333,13 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
}
|
||||
|
||||
String user = AuthenticationUtil.getRunAsUser();
|
||||
RetryingTransactionHelper.RetryingTransactionCallback callback = () ->
|
||||
{
|
||||
RetryingTransactionHelper.RetryingTransactionCallback callback = () -> {
|
||||
int sourceContentHashCode = getSourceContentHashCode(sourceNodeRef);
|
||||
if (!supported.get())
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(renderOrTransform.getName() +" is not supported. " +
|
||||
logger.debug(renderOrTransform.getName() + " is not supported. " +
|
||||
"The content might be too big or the source mimetype cannot be converted.");
|
||||
}
|
||||
failure(sourceNodeRef, renditionDefinition, sourceContentHashCode);
|
||||
@@ -372,26 +376,24 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
|
||||
public void failure(NodeRef sourceNodeRef, RenditionDefinition2 renditionDefinition, int transformContentHashCode)
|
||||
{
|
||||
// The original transaction may have already have failed
|
||||
AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () ->
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(() ->
|
||||
{
|
||||
consume(sourceNodeRef, null, renditionDefinition, transformContentHashCode);
|
||||
return null;
|
||||
}, false, true));
|
||||
AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () -> transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
|
||||
consume(sourceNodeRef, null, renditionDefinition, transformContentHashCode);
|
||||
return null;
|
||||
}, false, true));
|
||||
}
|
||||
|
||||
public void consume(NodeRef sourceNodeRef, InputStream transformInputStream, RenditionDefinition2 renditionDefinition,
|
||||
int transformContentHashCode)
|
||||
int transformContentHashCode)
|
||||
{
|
||||
int sourceContentHashCode = getSourceContentHashCode(sourceNodeRef);
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Consume: Source " + sourceContentHashCode + " and transform's source " + transformContentHashCode+" hashcodes");
|
||||
logger.debug("Consume: Source " + sourceContentHashCode + " and transform's source " + transformContentHashCode + " hashcodes");
|
||||
}
|
||||
|
||||
if (renditionDefinition instanceof TransformDefinition)
|
||||
{
|
||||
TransformDefinition transformDefinition = (TransformDefinition)renditionDefinition;
|
||||
TransformDefinition transformDefinition = (TransformDefinition) renditionDefinition;
|
||||
String targetMimetype = transformDefinition.getTargetMimetype();
|
||||
if (AsynchronousExtractor.isMetadataExtractMimetype(targetMimetype))
|
||||
{
|
||||
@@ -413,7 +415,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    private void consumeExtractedMetadata(NodeRef nodeRef, int sourceContentHashCode, InputStream transformInputStream,
            TransformDefinition transformDefinition, int transformContentHashCode)
            TransformDefinition transformDefinition, int transformContentHashCode)
    {
        if (transformInputStream == null)
        {
@@ -440,7 +442,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    private void consumeEmbeddedMetadata(NodeRef nodeRef, int sourceContentHashCode, InputStream transformInputStream,
            TransformDefinition transformDefinition, int transformContentHashCode)
            TransformDefinition transformDefinition, int transformContentHashCode)
    {
        if (transformInputStream == null)
        {
@@ -468,7 +470,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    private void consumeTransformReply(NodeRef sourceNodeRef, InputStream transformInputStream,
            TransformDefinition transformDefinition, int transformContentHashCode)
            TransformDefinition transformDefinition, int transformContentHashCode)
    {
        if (logger.isDebugEnabled())
        {
@@ -484,12 +486,10 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    /**
     * Takes a transformation (InputStream) and attaches it as a rendition to the source node.
     * Does nothing if there is already a newer rendition.
     * If the transformInputStream is null, this is taken to be a transform failure.
     * Takes a transformation (InputStream) and attaches it as a rendition to the source node. Does nothing if there is already a newer rendition. If the transformInputStream is null, this is taken to be a transform failure.
     */
    private void consumeRendition(NodeRef sourceNodeRef, int sourceContentHashCode, InputStream transformInputStream,
            RenditionDefinition2 renditionDefinition, int transformContentHashCode)
            RenditionDefinition2 renditionDefinition, int transformContentHashCode)
    {
        String renditionName = renditionDefinition.getRenditionName();
        if (transformContentHashCode != sourceContentHashCode)
@@ -507,93 +507,92 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
                    (transformInputStream == null ? " to null as the transform failed" : " to the transform result"));
        }

        AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () ->
                transactionService.getRetryingTransactionHelper().doInTransaction(() ->
        AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () -> transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
            // Ensure that the creation of a rendition does not cause updates to the modified, modifier properties on the source node
            NodeRef renditionNode = getRenditionNode(sourceNodeRef, renditionName);
            boolean createRenditionNode = renditionNode == null;
            boolean sourceHasAspectRenditioned = nodeService.hasAspect(sourceNodeRef, RenditionModel.ASPECT_RENDITIONED);
            try
            {
                ruleService.disableRuleType(RuleType.UPDATE);
                behaviourFilter.disableBehaviour(sourceNodeRef, ContentModel.ASPECT_AUDITABLE);
                behaviourFilter.disableBehaviour(sourceNodeRef, ContentModel.ASPECT_VERSIONABLE);

                // If they do not exist create the rendition association and the rendition node.
                if (createRenditionNode)
                {
                    renditionNode = createRenditionNode(sourceNodeRef, renditionDefinition);
                }
                else if (!nodeService.hasAspect(renditionNode, RenditionModel.ASPECT_RENDITION2))
                {
                    nodeService.addAspect(renditionNode, RenditionModel.ASPECT_RENDITION2, null);
                    if (logger.isDebugEnabled())
                    {
                        logger.debug("Added rendition2 aspect to rendition " + renditionName + " on " + sourceNodeRef);
                    }
                }
                if (logger.isDebugEnabled())
                {
                    logger.debug("Set ThumbnailLastModified for " + renditionName);
                }
                setThumbnailLastModified(sourceNodeRef, renditionName);

                if (transformInputStream != null)
                {
                    // Ensure that the creation of a rendition does not cause updates to the modified, modifier properties on the source node
                    NodeRef renditionNode = getRenditionNode(sourceNodeRef, renditionName);
                    boolean createRenditionNode = renditionNode == null;
                    boolean sourceHasAspectRenditioned = nodeService.hasAspect(sourceNodeRef, RenditionModel.ASPECT_RENDITIONED);
                    try
                    {
                        ruleService.disableRuleType(RuleType.UPDATE);
                        behaviourFilter.disableBehaviour(sourceNodeRef, ContentModel.ASPECT_AUDITABLE);
                        behaviourFilter.disableBehaviour(sourceNodeRef, ContentModel.ASPECT_VERSIONABLE);
                        // Set or replace rendition content
                        ContentWriter contentWriter = contentService.getWriter(renditionNode, DEFAULT_RENDITION_CONTENT_PROP, true);
                        String targetMimetype = renditionDefinition.getTargetMimetype();
                        contentWriter.setMimetype(targetMimetype);
                        contentWriter.setEncoding(DEFAULT_ENCODING);
                        ContentWriter renditionWriter = contentWriter;
                        renditionWriter.putContent(transformInputStream);

                        // If they do not exist create the rendition association and the rendition node.
                        if (createRenditionNode)
                        ContentReader contentReader = renditionWriter.getReader();
                        long sizeOfRendition = contentReader.getSize();
                        if (sizeOfRendition > 0L)
                        {
                            renditionNode = createRenditionNode(sourceNodeRef, renditionDefinition);
                        }
                        else if (!nodeService.hasAspect(renditionNode, RenditionModel.ASPECT_RENDITION2))
                        {
                            nodeService.addAspect(renditionNode, RenditionModel.ASPECT_RENDITION2, null);
                            if (logger.isDebugEnabled())
                            {
                                logger.debug("Added rendition2 aspect to rendition " + renditionName + " on " + sourceNodeRef);
                            }
                        }
                        if (logger.isDebugEnabled())
                        {
                            logger.debug("Set ThumbnailLastModified for " + renditionName);
                        }
                        setThumbnailLastModified(sourceNodeRef, renditionName);

                        if (transformInputStream != null)
                        {
                            try
                            {
                                // Set or replace rendition content
                                ContentWriter contentWriter = contentService.getWriter(renditionNode, DEFAULT_RENDITION_CONTENT_PROP, true);
                                String targetMimetype = renditionDefinition.getTargetMimetype();
                                contentWriter.setMimetype(targetMimetype);
                                contentWriter.setEncoding(DEFAULT_ENCODING);
                                ContentWriter renditionWriter = contentWriter;
                                renditionWriter.putContent(transformInputStream);

                                ContentReader contentReader = renditionWriter.getReader();
                                long sizeOfRendition = contentReader.getSize();
                                if (sizeOfRendition > 0L)
                                {
                                    if (logger.isDebugEnabled()) {
                                        logger.debug("Set rendition hashcode for " + renditionName);
                                    }
                                    nodeService.setProperty(renditionNode, RenditionModel.PROP_RENDITION_CONTENT_HASH_CODE, transformContentHashCode);
                                }
                                else
                                {
                                    logger.error("Transform was zero bytes for " + renditionName + " on " + sourceNodeRef);
                                    clearRenditionContentData(renditionNode);
                                }
                            }
                            catch (Exception e)
                            {
                                logger.error("Failed to copy transform InputStream into rendition " + renditionName + " on " + sourceNodeRef);
                                throw e;
                            logger.debug("Set rendition hashcode for " + renditionName);
                            }
                            nodeService.setProperty(renditionNode, RenditionModel.PROP_RENDITION_CONTENT_HASH_CODE, transformContentHashCode);
                        }
                        else
                        {
                            logger.error("Transform was zero bytes for " + renditionName + " on " + sourceNodeRef);
                            clearRenditionContentData(renditionNode);
                        }

                        if (!sourceHasAspectRenditioned)
                        {
                            nodeService.addAspect(sourceNodeRef, RenditionModel.ASPECT_RENDITIONED, null);
                        }
                    }
                    catch (Exception e)
                    {
                        throw new RenditionService2Exception(TRANSFORMING_ERROR_MESSAGE + e.getMessage(), e);
                        logger.error("Failed to copy transform InputStream into rendition " + renditionName + " on " + sourceNodeRef);
                        throw e;
                    }
                    finally
                    {
                        behaviourFilter.enableBehaviour(sourceNodeRef, ContentModel.ASPECT_AUDITABLE);
                        behaviourFilter.enableBehaviour(sourceNodeRef, ContentModel.ASPECT_VERSIONABLE);
                        ruleService.enableRuleType(RuleType.UPDATE);
                    }
                    return null;
                }, false, true));
            }
            else
            {
                clearRenditionContentData(renditionNode);
            }

            if (!sourceHasAspectRenditioned)
            {
                nodeService.addAspect(sourceNodeRef, RenditionModel.ASPECT_RENDITIONED, null);
            }
        }
        catch (Exception e)
        {
            throw new RenditionService2Exception(TRANSFORMING_ERROR_MESSAGE + e.getMessage(), e);
        }
        finally
        {
            behaviourFilter.enableBehaviour(sourceNodeRef, ContentModel.ASPECT_AUDITABLE);
            behaviourFilter.enableBehaviour(sourceNodeRef, ContentModel.ASPECT_VERSIONABLE);
            ruleService.enableRuleType(RuleType.UPDATE);
        }
        return null;
        }, false, true));
    }
    }

@@ -634,14 +633,14 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea

        if (logger.isTraceEnabled())
        {
            logger.trace("Setting thumbnail last modified date to " + lastModifiedValue +" on source node: " + sourceNodeRef);
            logger.trace("Setting thumbnail last modified date to " + lastModifiedValue + " on source node: " + sourceNodeRef);
        }

        if (nodeService.hasAspect(sourceNodeRef, ContentModel.ASPECT_THUMBNAIL_MODIFICATION))
        {
            List<String> thumbnailMods = (List<String>) nodeService.getProperty(sourceNodeRef, ContentModel.PROP_LAST_THUMBNAIL_MODIFICATION_DATA);
            String target = null;
            for (String currThumbnailMod: thumbnailMods)
            for (String currThumbnailMod : thumbnailMods)
            {
                if (currThumbnailMod.startsWith(prefix))
                {
@@ -665,8 +664,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    /**
     * Returns the hash code of the source node's content url. As transformations may be returned in a different
     * sequences to which they were requested, this is used work out if a rendition should be replaced.
     * Returns the hash code of the source node's content url. As transformations may be returned in a different sequences to which they were requested, this is used work out if a rendition should be replaced.
     */
    private int getSourceContentHashCode(NodeRef sourceNodeRef)
    {
@@ -675,7 +673,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
        if (contentData != null)
        {
            // Originally we used the contentData URL, but that is not enough if the mimetype changes.
            String contentString = contentData.getContentUrl()+contentData.getMimetype();
            String contentString = contentData.getContentUrl() + contentData.getMimetype();
            if (contentString != null)
            {
                hashCode = contentString.hashCode();
@@ -685,13 +683,11 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    /**
     * Returns the hash code of source node's content url on the rendition node (node may be null) if it does not exist.
     * Used work out if a rendition should be replaced. {@code -2} is returned if the rendition does not exist or was
     * not created by RenditionService2. {@code -1} is returned if there was no source content or the rendition failed.
     * Returns the hash code of source node's content url on the rendition node (node may be null) if it does not exist. Used work out if a rendition should be replaced. {@code -2} is returned if the rendition does not exist or was not created by RenditionService2. {@code -1} is returned if there was no source content or the rendition failed.
     */
    private int getRenditionContentHashCode(NodeRef renditionNode)
    {
        if ( renditionNode == null || !nodeService.hasAspect(renditionNode, RenditionModel.ASPECT_RENDITION2))
        if (renditionNode == null || !nodeService.hasAspect(renditionNode, RenditionModel.ASPECT_RENDITION2))
        {
            return RENDITION2_DOES_NOT_EXIST;
        }
@@ -699,7 +695,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
        Serializable hashCode = nodeService.getProperty(renditionNode, PROP_RENDITION_CONTENT_HASH_CODE);
        return hashCode == null
                ? SOURCE_HAS_NO_CONTENT
                : (int)hashCode;
                : (int) hashCode;
    }

    private NodeRef getRenditionNode(NodeRef sourceNodeRef, String renditionName)
@@ -773,11 +769,12 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    /**
     * This method checks whether the specified source node is of a content class which has been registered for
     * rendition prevention.
     * This method checks whether the specified source node is of a content class which has been registered for rendition prevention.
     *
     * @param sourceNode the node to check.
     * @throws RenditionService2PreventedException if the source node is configured for rendition prevention.
     * @param sourceNode
     *            the node to check.
     * @throws RenditionService2PreventedException
     *             if the source node is configured for rendition prevention.
     */
    // This code is based on the old RenditionServiceImpl.checkSourceNodeForPreventionClass(...)
    private void checkSourceNodeForPreventionClass(NodeRef sourceNode)
@@ -823,7 +820,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea

        for (ChildAssociationRef childAssoc : childAsocs)
        {
            NodeRef renditionNode = childAssoc.getChildRef();
            NodeRef renditionNode = childAssoc.getChildRef();
            if (isRenditionAvailable(sourceNodeRef, renditionNode))
            {
                result.add(childAssoc);
@@ -833,8 +830,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
    }

    /**
     * Indicates if the rendition is available. Failed renditions (there was an error) don't have a contentUrl
     * and out of date renditions or those still being created don't have a matching contentHashCode.
     * Indicates if the rendition is available. Failed renditions (there was an error) don't have a contentUrl and out of date renditions or those still being created don't have a matching contentHashCode.
     */
    public boolean isRenditionAvailable(NodeRef sourceNodeRef, NodeRef renditionNode)
    {
@@ -852,7 +848,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
        int renditionContentHashCode = getRenditionContentHashCode(renditionNode);
        if (logger.isDebugEnabled())
        {
            logger.debug("isRenditionAvailable source " + sourceContentHashCode + " and rendition " + renditionContentHashCode+" hashcodes");
            logger.debug("isRenditionAvailable source " + sourceContentHashCode + " and rendition " + renditionContentHashCode + " hashcodes");
        }
        if (sourceContentHashCode != renditionContentHashCode)
        {
@@ -892,19 +888,17 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
            }
            ChildAssociationRef childAssoc = renditions.get(0);
            NodeRef renditionNode = childAssoc.getChildRef();
            return !isRenditionAvailable(sourceNodeRef, renditionNode) ? null: childAssoc;
            return !isRenditionAvailable(sourceNodeRef, renditionNode) ? null : childAssoc;
        }
    }

    @Override
    public void clearRenditionContentDataInTransaction(NodeRef renditionNode)
    {
        AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () ->
                transactionService.getRetryingTransactionHelper().doInTransaction(() ->
                {
                    clearRenditionContentData(renditionNode);
                    return null;
                }, false, true));
        AuthenticationUtil.runAsSystem((AuthenticationUtil.RunAsWork<Void>) () -> transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
            clearRenditionContentData(renditionNode);
            return null;
        }, false, true));
    }

    @Override
@@ -950,4 +944,54 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
            }
        }

    @Override
    public void forceRenditionsContentHashCode(NodeRef sourceNodeRef)
    {
        if (sourceNodeRef != null && nodeService.exists(sourceNodeRef))
        {
            List<ChildAssociationRef> renditions = getRenditionChildAssociations(sourceNodeRef);
            if (renditions != null)
            {
                int sourceContentHashCode = getSourceContentHashCode(sourceNodeRef);
                for (ChildAssociationRef rendition : renditions)
                {
                    NodeRef renditionNode = rendition.getChildRef();
                    if (nodeService.hasAspect(renditionNode, RenditionModel.ASPECT_RENDITION2))
                    {
                        int renditionContentHashCode = getRenditionContentHashCode(renditionNode);
                        String renditionName = rendition.getQName().getLocalName();
                        if (sourceContentHashCode != renditionContentHashCode)
                        {
                            if (logger.isDebugEnabled())
                            {
                                logger.debug("Update content hash code for rendition " + renditionName + " of node "
                                        + sourceNodeRef);
                            }
                            nodeService.setProperty(renditionNode, PROP_RENDITION_CONTENT_HASH_CODE,
                                    sourceContentHashCode);
                        }
                    }
                }
            }
        }
    }

    // Checks if the given transform callback is a text extract transform for content indexing or metadata extract/embed.
    private boolean isTextOrMetadataExtractTransform(RenderOrTransformCallBack renderOrTransform)
    {
        RenditionDefinition2 renditionDefinition = renderOrTransform.getRenditionDefinition();
        return renditionDefinition != null && ALLOWED_MIMETYPES.contains(renditionDefinition.getTargetMimetype());
    }

    private boolean isAsyncAllowed(RenderOrTransformCallBack renderOrTransform)
    {
        // If enabled is false, all async transforms/renditions must be blocked
        if (!enabled)
        {
            return false;
        }

        // If thumbnails are disabled, allow only text extract or metadata extract/embed transforms
        return thumbnailsEnabled || isTextOrMetadataExtractTransform(renderOrTransform);
    }
}

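The javadoc in the hunks above describes a hash-code guard for out-of-order transform replies: the source hash is derived from the content URL plus mimetype, and a reply whose hash no longer matches the current source is discarded. The following standalone Java sketch is not part of the diff and uses no Alfresco APIs; it only restates that guard, and the sentinel constants are assumed values mirroring the javadoc wording.

// Illustrative sketch only; not Alfresco code.
public class ContentHashGuardSketch
{
    static final int SOURCE_HAS_NO_CONTENT = -1; // assumed sentinel: no source content
    static final int RENDITION2_DOES_NOT_EXIST = -2; // assumed sentinel: no RenditionService2 rendition

    // Hash derived from the content URL plus mimetype, as the comment in the diff explains.
    static int sourceContentHashCode(String contentUrl, String mimetype)
    {
        return contentUrl == null ? SOURCE_HAS_NO_CONTENT : (contentUrl + mimetype).hashCode();
    }

    // A transform reply is only applied while its hash still matches the current source content.
    static boolean shouldApplyTransformReply(int currentSourceHash, int replySourceHash)
    {
        return currentSourceHash == replySourceHash;
    }

    public static void main(String[] args)
    {
        int hashAtRequestTime = sourceContentHashCode("store://2025/doc-v1.bin", "application/pdf");
        int hashNow = sourceContentHashCode("store://2025/doc-v2.bin", "application/pdf");
        System.out.println(shouldApplyTransformReply(hashNow, hashAtRequestTime)); // false: the reply is stale
        System.out.println(shouldApplyTransformReply(hashNow, hashNow)); // true: still current
    }
}

In the same spirit, a rendition is treated as "available" only while its stored hash matches the source's current hash, which is why a content change immediately invalidates existing renditions.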
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2021 Alfresco Software Limited
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
@@ -30,6 +30,9 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.lookup.EntityLookupCache;
import org.alfresco.repo.domain.node.Node;
@@ -41,8 +44,6 @@ import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.EqualsHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class NodePermissionAssessor
{
@@ -57,7 +58,7 @@ public class NodePermissionAssessor
    private long startTime;
    private int maxPermissionChecks;
    private long maxPermissionCheckTimeMillis;

    private EntityLookupCache<Long, Node, NodeRef> nodesCache;
    private NodeService nodeService;
    private PermissionService permissionService;
@@ -68,7 +69,7 @@ public class NodePermissionAssessor
        this.permissionService = permissionService;
        this.nodesCache = nodeCache;
        this.nodeService = nodeService;

        this.checksPerformed = 0;
        this.maxPermissionChecks = Integer.MAX_VALUE;
        this.maxPermissionCheckTimeMillis = Long.MAX_VALUE;
@@ -82,12 +83,12 @@ public class NodePermissionAssessor
    }

    public boolean isIncluded(Node node)
    {
    {
        if (isFirstRecord())
        {
            this.startTime = System.currentTimeMillis();
        }

        checksPerformed++;
        return isReallyIncluded(node);
    }
@@ -107,34 +108,34 @@ public class NodePermissionAssessor
        String owner = getOwner(node);
        return EqualsHelper.nullSafeEquals(authority.getAuthority(), owner);
    }

    private String getOwner(Node node)
    {
        nodesCache.setValue(node.getId(), node);
        Set<QName> nodeAspects = nodeService.getAspects(node.getNodeRef());

        String userName = null;
        if (nodeAspects.contains(ContentModel.ASPECT_AUDITABLE))
        {
            userName = node.getAuditableProperties().getAuditCreator();
        }
        else if (nodeAspects.contains(ContentModel.ASPECT_OWNABLE))
        if (nodeAspects.contains(ContentModel.ASPECT_OWNABLE))
        {
            Serializable owner = nodeService.getProperty(node.getNodeRef(), ContentModel.PROP_OWNER);
            userName = DefaultTypeConverter.INSTANCE.convert(String.class, owner);
        }

        else if (nodeAspects.contains(ContentModel.ASPECT_AUDITABLE))
        {
            userName = node.getAuditableProperties().getAuditCreator();
        }

        return userName;
    }

    boolean isReallyIncluded(Node node)
    {
        if (isNullReading)
        {
            return false;
        }

        return isSystemReading ||

        return isSystemReading ||
                isAdminReading ||
                canRead(node.getAclId()) ||
                isOwnerReading(node, authority);
@@ -151,7 +152,7 @@ public class NodePermissionAssessor
        this.maxPermissionChecks = maxPermissionChecks + 1;
    }
    }

    public boolean shouldQuitChecks()
    {
        if (checksPerformed >= maxPermissionChecks)
@@ -173,7 +174,7 @@ public class NodePermissionAssessor
    {
        this.maxPermissionCheckTimeMillis = maxPermissionCheckTimeMillis;
    }

    protected boolean canRead(Long aclId)
    {
        Boolean res = aclReadCache.get(aclId);
@@ -184,7 +185,7 @@ public class NodePermissionAssessor
        }
        return res;
    }

    protected boolean canCurrentUserRead(Long aclId)
    {
        // cache resolved ACLs
@@ -195,7 +196,7 @@ public class NodePermissionAssessor
        {
            if (authorities.contains(auth))
            {
                return false;
                return false;
            }
        }

@@ -204,10 +205,10 @@ public class NodePermissionAssessor
        {
            if (authorities.contains(auth))
            {
                return true;
                return true;
            }
        }

        return false;
    }
}
}

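The getOwner(...) change in the hunks above swaps the order of the aspect checks, so a node's explicit owner (cm:ownable) appears to be consulted before the audit creator (cm:auditable). The short standalone sketch below is not part of the diff and uses no Alfresco types; it only restates that precedence rule.

// Illustrative sketch only; not repository code.
public class OwnerPrecedenceSketch
{
    static String resolveOwner(boolean hasOwnable, String explicitOwner, boolean hasAuditable, String auditCreator)
    {
        if (hasOwnable)
        {
            return explicitOwner; // an explicitly set owner wins
        }
        if (hasAuditable)
        {
            return auditCreator; // otherwise fall back to the creator recorded by auditing
        }
        return null; // no owner information available
    }

    public static void main(String[] args)
    {
        System.out.println(resolveOwner(true, "jdoe", true, "admin")); // jdoe
        System.out.println(resolveOwner(false, null, true, "admin")); // admin
        System.out.println(resolveOwner(false, null, false, null)); // null
    }
}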
@@ -30,12 +30,12 @@ import jakarta.servlet.http.HttpServletResponse;
import org.alfresco.repo.management.subsystems.ActivateableBean;

/**
 * A default {@link AdminConsoleAuthenticator} implementation. Returns null to request a basic auth challenge.
 * A default {@link ExternalUserAuthenticator} implementation. Returns null to request a basic auth challenge.
 */
public class DefaultAdminConsoleAuthenticator implements AdminConsoleAuthenticator, ActivateableBean
public class DefaultAdminConsoleAuthenticator implements ExternalUserAuthenticator, ActivateableBean
{
    @Override
    public String getAdminConsoleUser(HttpServletRequest request, HttpServletResponse response)
    public String getUserId(HttpServletRequest request, HttpServletResponse response)
    {
        return null;
    }

@@ -0,0 +1,55 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.security.authentication.external;

import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

import org.alfresco.repo.management.subsystems.ActivateableBean;

/**
 * A default {@link ExternalUserAuthenticator} implementation. Returns null to request a basic auth challenge.
 */
public class DefaultWebScriptsHomeAuthenticator implements ExternalUserAuthenticator, ActivateableBean
{
    @Override
    public String getUserId(HttpServletRequest request, HttpServletResponse response)
    {
        return null;
    }

    @Override
    public void requestAuthentication(HttpServletRequest request, HttpServletResponse response)
    {
        // No implementation
    }

    @Override
    public boolean isActive()
    {
        return false;
    }
}

@@ -29,28 +29,17 @@ import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

/**
 * An interface for objects capable of extracting an externally authenticated user ID from the HTTP Admin Console webscript request.
 * An interface for objects capable of extracting an externally authenticated user ID from the HTTP request.
 */
public interface AdminConsoleAuthenticator
public interface ExternalUserAuthenticator
{
    /**
     * Gets an externally authenticated user ID from the HTTP Admin Console webscript request.
     * Gets an externally authenticated user ID from the HTTP request.
     *
     * @param request
     *            the request
     * @param response
     *            the response
     * @return the user ID or <code>null</code> if the user is unauthenticated
     */
    String getAdminConsoleUser(HttpServletRequest request, HttpServletResponse response);
    String getUserId(HttpServletRequest request, HttpServletResponse response);

    /**
     * Requests an authentication.
     *
     * @param request
     *            the request
     * @param response
     *            the response
     */
    /* Sends redirect to external site to initiate the OIDC authorization code flow. */
    void requestAuthentication(HttpServletRequest request, HttpServletResponse response);
}

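The hunk above renames AdminConsoleAuthenticator to the more general ExternalUserAuthenticator, where getUserId(...) returns null for unauthenticated requests and requestAuthentication(...) starts an external authentication flow. The sketch below is not part of the diff; it shows one way that contract could be implemented, and the header name and login URL are hypothetical placeholders.

// Illustrative sketch only; not repository code.
package org.alfresco.repo.security.authentication.external;

import java.io.IOException;
import java.io.UncheckedIOException;

import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

public class HeaderBasedExternalUserAuthenticator implements ExternalUserAuthenticator
{
    // Hypothetical header name; a real deployment would make this configurable.
    private static final String USER_HEADER = "X-Remote-User";

    // Hypothetical external identity-provider endpoint used to start an authentication flow.
    private static final String LOGIN_URL = "https://idp.example.com/login";

    @Override
    public String getUserId(HttpServletRequest request, HttpServletResponse response)
    {
        // Null signals "unauthenticated", matching the interface javadoc above.
        String user = request.getHeader(USER_HEADER);
        return (user == null || user.isBlank()) ? null : user;
    }

    @Override
    public void requestAuthentication(HttpServletRequest request, HttpServletResponse response)
    {
        try
        {
            // Redirect the browser to the external identity provider.
            response.sendRedirect(LOGIN_URL);
        }
        catch (IOException e)
        {
            throw new UncheckedIOException(e);
        }
    }
}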
Some files were not shown because too many files have changed in this diff.