mirror of
https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-10-01 14:41:46 +00:00
Compare commits
51 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
545ba61609 | ||
|
aedd04beb3 | ||
|
c6f271fc49 | ||
|
71b901d16f | ||
|
1819400ac0 | ||
|
22d6f68c48 | ||
|
e18edd2c07 | ||
|
7229105b86 | ||
|
996809e3c2 | ||
|
bc76bedc4d | ||
|
179366c974 | ||
|
981b98ed88 | ||
|
b1469f4410 | ||
|
2e92dab995 | ||
|
ffcd8973e3 | ||
|
b3dcb3ea35 | ||
|
7a49b4c331 | ||
|
dc0b1988ca | ||
|
378abdfe9b | ||
|
5efeeffe3c | ||
|
d15a71fddd | ||
|
0f2d7a857d | ||
|
e30707fd47 | ||
|
373e0a2d35 | ||
|
e5443cf558 | ||
|
8badf8747a | ||
|
f4274f6900 | ||
|
5184a95d5f | ||
|
80c79a45b7 | ||
|
d1a67e2773 | ||
|
881b8f05c9 | ||
|
e74ec9425c | ||
|
f855bda65c | ||
|
27406cda19 | ||
|
e5b3afc560 | ||
|
f13a0480dc | ||
|
68c0aea1f5 | ||
|
9e91d28ade | ||
|
8f1175de64 | ||
|
7d4b74cab7 | ||
|
b51374532e | ||
|
6e5b64be12 | ||
|
67195f6dda | ||
|
d060c548b3 | ||
|
9de39009a7 | ||
|
b920670ebd | ||
|
94fd1f4c98 | ||
|
5a04dabd72 | ||
|
00eef170ef | ||
|
43dd274b6c | ||
|
842687e25d |
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-community-repo-amps</artifactId>
|
||||
<version>23.3.6.2</version>
|
||||
<version>23.3.14.2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-parent</artifactId>
|
||||
<version>23.3.6.2</version>
|
||||
<version>23.3.14.2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
|
||||
<version>23.3.6.2</version>
|
||||
<version>23.3.14.2</version>
|
||||
</parent>
|
||||
|
||||
<build>
|
||||
|
@@ -134,6 +134,16 @@ public class AddToHoldsBulkV1Tests extends BaseRMRestTest
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(null).search(searchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
|
||||
RestRequestQueryModel ancestorReq = getContentFromFolderAndAllSubfoldersQuery(rootFolder.getNodeRefWithoutVersion());
|
||||
SearchRequest ancestorSearchRequest = new SearchRequest();
|
||||
ancestorSearchRequest.setQuery(ancestorReq);
|
||||
|
||||
STEP("Wait until paths are indexed.");
|
||||
// to improve stability on CI - seems that sometimes during big load we need to wait longer for the condition
|
||||
await().atMost(120, TimeUnit.SECONDS)
|
||||
.until(() -> getRestAPIFactory().getSearchAPI(null).search(ancestorSearchRequest).getPagination()
|
||||
.getTotalItems() == NUMBER_OF_FILES);
|
||||
|
||||
holdBulkOperation = HoldBulkOperation.builder()
|
||||
.query(queryReq)
|
||||
.op(HoldBulkOperationType.ADD).build();
|
||||
|
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-parent</artifactId>
|
||||
<version>23.3.6.2</version>
|
||||
<version>23.3.14.2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
|
@@ -119,6 +119,11 @@ rm.patch.v35.holdNewChildAssocPatch.batchSize=1000
|
||||
rm.haspermissionmap.read=Read
|
||||
rm.haspermissionmap.write=WriteProperties,AddChildren,ReadContent
|
||||
|
||||
# Extended Permissions
|
||||
# Enable matching the given username with the correct casing username when retrieving an IPR group.
|
||||
# Only needs to be used if there are owners that don't have the username in the correct casing.
|
||||
rm.extendedSecurity.enableUsernameNormalization=false
|
||||
|
||||
#
|
||||
# Extended auto-version behaviour. If true and other auto-version properties are satisfied, then
|
||||
# a document will be auto-versioned when its type is changed.
|
||||
|
@@ -611,6 +611,7 @@
|
||||
<property name="authorityService" ref="authorityService"/>
|
||||
<property name="permissionService" ref="permissionService"/>
|
||||
<property name="transactionService" ref="transactionService"/>
|
||||
<property name="enableUsernameNormalization" value="${rm.extendedSecurity.enableUsernameNormalization}" />
|
||||
</bean>
|
||||
|
||||
<bean id="ExtendedSecurityService" class="org.springframework.aop.framework.ProxyFactoryBean">
|
||||
|
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
|
||||
<version>23.3.6.2</version>
|
||||
<version>23.3.14.2</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
|
@@ -34,6 +34,12 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
@@ -42,7 +48,10 @@ import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService;
|
||||
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.security.authority.RMAuthority;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -54,12 +63,6 @@ import org.alfresco.service.namespace.RegexQNamePattern;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.ParameterCheck;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.extensions.webscripts.ui.common.StringUtils;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
|
||||
/**
|
||||
* Extended security service implementation.
|
||||
@@ -68,9 +71,9 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
implements ExtendedSecurityService,
|
||||
RecordsManagementModel,
|
||||
ApplicationListener<ContextRefreshedEvent>
|
||||
{
|
||||
/** ipr group names */
|
||||
static final String ROOT_IPR_GROUP = "INPLACE_RECORD_MANAGEMENT";
|
||||
@@ -95,8 +98,11 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/** transaction service */
|
||||
private TransactionService transactionService;
|
||||
|
||||
private boolean enableUsernameNormalization;
|
||||
|
||||
/**
|
||||
* @param filePlanService file plan service
|
||||
* @param filePlanService
|
||||
* file plan service
|
||||
*/
|
||||
public void setFilePlanService(FilePlanService filePlanService)
|
||||
{
|
||||
@@ -104,7 +110,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filePlanRoleService file plan role service
|
||||
* @param filePlanRoleService
|
||||
* file plan role service
|
||||
*/
|
||||
public void setFilePlanRoleService(FilePlanRoleService filePlanRoleService)
|
||||
{
|
||||
@@ -112,7 +119,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param authorityService authority service
|
||||
* @param authorityService
|
||||
* authority service
|
||||
*/
|
||||
public void setAuthorityService(AuthorityService authorityService)
|
||||
{
|
||||
@@ -120,7 +128,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param permissionService permission service
|
||||
* @param permissionService
|
||||
* permission service
|
||||
*/
|
||||
public void setPermissionService(PermissionService permissionService)
|
||||
{
|
||||
@@ -128,13 +137,23 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
|
||||
/**
|
||||
* @param transactionService transaction service
|
||||
* @param transactionService
|
||||
* transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param enableUsernameNormalization
|
||||
* enable username normalization to ensure correct casing
|
||||
*/
|
||||
public void setEnableUsernameNormalization(boolean enableUsernameNormalization)
|
||||
{
|
||||
this.enableUsernameNormalization = enableUsernameNormalization;
|
||||
}
|
||||
|
||||
/**
|
||||
* Application context refresh event handler
|
||||
*/
|
||||
@@ -142,19 +161,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent)
|
||||
{
|
||||
// run as System on bootstrap
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>()
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Object>() {
|
||||
public Object doWork()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
// if the root group doesn't exist then create it
|
||||
if (!authorityService.authorityExists(getRootIRPGroup()))
|
||||
{
|
||||
authorityService.createAuthority(AuthorityType.GROUP, ROOT_IPR_GROUP, ROOT_IPR_GROUP,
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -174,7 +191,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
return GROUP_PREFIX + ROOT_IPR_GROUP;
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService#hasExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override
|
||||
@@ -224,8 +241,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Helper to get authorities for a given group
|
||||
*
|
||||
* @param group group name
|
||||
* @return Set<String> immediate authorities
|
||||
* @param group
|
||||
* group name
|
||||
* @return Set<String> immediate authorities
|
||||
*/
|
||||
private Set<String> getAuthorities(String group)
|
||||
{
|
||||
@@ -284,8 +302,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Return null if none found.
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
|
||||
*/
|
||||
private Pair<String, String> getIPRGroups(NodeRef nodeRef)
|
||||
{
|
||||
@@ -321,17 +340,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Given a set of readers and writers find or create the appropriate IPR groups.
|
||||
* <p>
|
||||
* The IPR groups are named with hashes of the authority lists in order to reduce
|
||||
* the set of groups that require exact match. A further index is used to handle
|
||||
* a situation where there is a hash clash, but a difference in the authority lists.
|
||||
* The IPR groups are named with hashes of the authority lists in order to reduce the set of groups that require exact match. A further index is used to handle a situation where there is a hash clash, but a difference in the authority lists.
|
||||
* <p>
|
||||
* When no match is found the groups are created. Once created
|
||||
* When no match is found the groups are created. Once created
|
||||
*
|
||||
* @param filePlan file plan
|
||||
* @param readers authorities with read
|
||||
* @param writers authorities with write
|
||||
* @return Pair<String, String> where first is the full name of the read group and
|
||||
* second is the full name of the write group
|
||||
* @param filePlan
|
||||
* file plan
|
||||
* @param readers
|
||||
* authorities with read
|
||||
* @param writers
|
||||
* authorities with write
|
||||
* @return Pair<String, String> where first is the full name of the read group and second is the full name of the write group
|
||||
*/
|
||||
private Pair<String, String> createOrFindIPRGroups(Set<String> readers, Set<String> writers)
|
||||
{
|
||||
@@ -343,20 +362,28 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Create or find an IPR group based on the provided prefix and authorities.
|
||||
*
|
||||
* @param groupPrefix group prefix
|
||||
* @param authorities authorities
|
||||
* @return String full group name
|
||||
* @param groupPrefix
|
||||
* group prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return String full group name
|
||||
*/
|
||||
private String createOrFindIPRGroup(String groupPrefix, Set<String> authorities)
|
||||
{
|
||||
String group = null;
|
||||
|
||||
// If enabled, the authorities are forced to match the correct casing of the usernames in case they were set
|
||||
// with the incorrect casing.
|
||||
// If not, it will just use the authorities as they are.
|
||||
// In normal circumstances, the authorities are in the correct casing, so this is disabled by default.
|
||||
Set<String> authoritySet = normalizeAuthorities(authorities);
|
||||
|
||||
// find group or determine what the next index is if no group exists or there is a clash
|
||||
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authorities);
|
||||
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authoritySet);
|
||||
|
||||
if (groupResult.getFirst() == null)
|
||||
{
|
||||
group = createIPRGroup(groupPrefix, authorities, groupResult.getSecond());
|
||||
group = createIPRGroup(groupPrefix, authoritySet, groupResult.getSecond());
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -369,13 +396,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Given a group name prefix and the authorities, finds the exact match existing group.
|
||||
* <p>
|
||||
* If the group does not exist then the group returned is null and the index shows the next available
|
||||
* group index for creation.
|
||||
* If the group does not exist then the group returned is null and the index shows the next available group index for creation.
|
||||
*
|
||||
* @param groupPrefix group name prefix
|
||||
* @param authorities authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second
|
||||
* if the next available create index
|
||||
* @param groupPrefix
|
||||
* group name prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second if the next available create index
|
||||
*/
|
||||
private Pair<String, Integer> findIPRGroup(String groupPrefix, Set<String> authorities)
|
||||
{
|
||||
@@ -391,12 +418,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
while (hasMoreItems == true)
|
||||
{
|
||||
// get matching authorities
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS*pageCount, MAX_ITEMS));
|
||||
PagingResults<String> results = authorityService.getAuthorities(
|
||||
AuthorityType.GROUP,
|
||||
RMAuthority.ZONE_APP_RM,
|
||||
groupShortNamePrefix,
|
||||
false,
|
||||
false,
|
||||
new PagingRequest(MAX_ITEMS * pageCount, MAX_ITEMS));
|
||||
|
||||
// record the total count
|
||||
nextGroupIndex = nextGroupIndex + results.getPage().size();
|
||||
@@ -413,29 +441,88 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
|
||||
// determine if there are any more pages to inspect
|
||||
hasMoreItems = results.hasMoreItems();
|
||||
pageCount ++;
|
||||
pageCount++;
|
||||
}
|
||||
|
||||
return new Pair<>(iprGroup, nextGroupIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of authorities, normalizes the authority names to ensure correct casing.
|
||||
*
|
||||
* @param authNames
|
||||
* @return
|
||||
*/
|
||||
private Set<String> normalizeAuthorities(Set<String> authNames)
|
||||
{
|
||||
// If disabled or no authorities, return as is
|
||||
if (!enableUsernameNormalization || authNames == null || authNames.isEmpty())
|
||||
{
|
||||
return authNames;
|
||||
}
|
||||
|
||||
Set<String> normalizedAuthorities = new HashSet<>();
|
||||
for (String authorityName : authNames)
|
||||
{
|
||||
normalizedAuthorities.add(normalizeAuthorityName(authorityName));
|
||||
}
|
||||
return normalizedAuthorities;
|
||||
}
|
||||
|
||||
/**
|
||||
* Usernames are case insensitive but affect the IPR group matching when set with different casing. For a given authority of type user, this method normalizes the authority name. If group, it returns the name as-is.
|
||||
*
|
||||
* @param authorityName
|
||||
* the authority name to normalize
|
||||
* @return the normalized authority name
|
||||
*/
|
||||
private String normalizeAuthorityName(String authorityName)
|
||||
{
|
||||
if (authorityName == null || authorityName.startsWith(GROUP_PREFIX))
|
||||
{
|
||||
return authorityName;
|
||||
}
|
||||
|
||||
// For users, attempt to get the correct casing from the username property of the user node
|
||||
if (authorityService.authorityExists(authorityName))
|
||||
{
|
||||
try
|
||||
{
|
||||
NodeRef authorityNodeRef = authorityService.getAuthorityNodeRef(authorityName);
|
||||
if (authorityNodeRef != null)
|
||||
{
|
||||
String username = (String) nodeService.getProperty(authorityNodeRef, ContentModel.PROP_USERNAME);
|
||||
return username != null ? username : authorityName;
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// If anything goes wrong, fallback to the original name
|
||||
}
|
||||
}
|
||||
|
||||
return authorityName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether a group exactly matches a list of authorities.
|
||||
*
|
||||
* @param authorities list of authorities
|
||||
* @param group group
|
||||
* @param authorities
|
||||
* list of authorities
|
||||
* @param group
|
||||
* group
|
||||
* @return
|
||||
*/
|
||||
private boolean isIPRGroupTrueMatch(String group, Set<String> authorities)
|
||||
{
|
||||
//Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
// Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
|
||||
Set<String> plainAuthorities = new HashSet<String>();
|
||||
if (authorities != null)
|
||||
{
|
||||
plainAuthorities.addAll(authorities);
|
||||
plainAuthorities.remove(PermissionService.ALL_AUTHORITIES);
|
||||
}
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
|
||||
return contained.equals(plainAuthorities);
|
||||
}
|
||||
|
||||
@@ -444,15 +531,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param authorities authorities
|
||||
* @return String group prefix short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param authorities
|
||||
* authorities
|
||||
* @return String group prefix short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
/* package */ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
.append(prefix)
|
||||
.append(getAuthoritySetHashCode(authorities));
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -464,13 +553,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* 'package' scope to help testing.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
/*package*/ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
/* package */ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
|
||||
{
|
||||
return getIPRGroupShortName(prefix, authorities, Integer.toString(index));
|
||||
}
|
||||
@@ -480,17 +573,21 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
* <p>
|
||||
* Note this excludes the "GROUP_" prefix.
|
||||
*
|
||||
* @param prefix prefix
|
||||
* @param readers read authorities
|
||||
* @param writers write authorities
|
||||
* @param index group index
|
||||
* @return String group short name
|
||||
* @param prefix
|
||||
* prefix
|
||||
* @param readers
|
||||
* read authorities
|
||||
* @param writers
|
||||
* write authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String group short name
|
||||
*/
|
||||
private String getIPRGroupShortName(String prefix, Set<String> authorities, String index)
|
||||
{
|
||||
StringBuilder builder = new StringBuilder(128)
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
.append(getIPRGroupPrefixShortName(prefix, authorities))
|
||||
.append(index);
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
@@ -498,8 +595,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Gets the hashcode value of a set of authorities.
|
||||
*
|
||||
* @param authorities set of authorities
|
||||
* @return int hash code
|
||||
* @param authorities
|
||||
* set of authorities
|
||||
* @return int hash code
|
||||
*/
|
||||
private int getAuthoritySetHashCode(Set<String> authorities)
|
||||
{
|
||||
@@ -514,10 +612,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Creates a new IPR group.
|
||||
*
|
||||
* @param groupNamePrefix group name prefix
|
||||
* @param children child authorities
|
||||
* @param index group index
|
||||
* @return String full name of created group
|
||||
* @param groupNamePrefix
|
||||
* group name prefix
|
||||
* @param children
|
||||
* child authorities
|
||||
* @param index
|
||||
* group index
|
||||
* @return String full name of created group
|
||||
*/
|
||||
private String createIPRGroup(String groupNamePrefix, Set<String> children, int index)
|
||||
{
|
||||
@@ -547,10 +648,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(DuplicateChildNodeNameException ex)
|
||||
catch (DuplicateChildNodeNameException ex)
|
||||
{
|
||||
// the group was concurrently created
|
||||
group = authorityService.getName(AuthorityType.GROUP, groupShortName);
|
||||
// Rethrow as ConcurrencyFailureException so that is can be retried and linked to the group created by the concurrent transaction
|
||||
throw new ConcurrencyFailureException("IPR group creation failed due to concurrent duplicate group name creation: " + groupShortName);
|
||||
}
|
||||
|
||||
return group;
|
||||
@@ -559,8 +660,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Assign IPR groups to a node reference with the correct permissions.
|
||||
*
|
||||
* @param iprGroups iprGroups, first read and second write
|
||||
* @param nodeRef node reference
|
||||
* @param iprGroups
|
||||
* iprGroups, first read and second write
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void assignIPRGroupsToNode(Pair<String, String> iprGroups, NodeRef nodeRef)
|
||||
{
|
||||
@@ -598,7 +701,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* Clear the nodes IPR permissions
|
||||
*
|
||||
* @param nodeRef node reference
|
||||
* @param nodeRef
|
||||
* node reference
|
||||
*/
|
||||
private void clearPermissions(NodeRef nodeRef, Pair<String, String> iprGroups)
|
||||
{
|
||||
@@ -610,7 +714,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedReaders(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedReaders(NodeRef nodeRef)
|
||||
{
|
||||
return getReaders(nodeRef);
|
||||
}
|
||||
@@ -618,7 +724,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedWriters(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public Set<String> getExtendedWriters(NodeRef nodeRef)
|
||||
{
|
||||
return getWriters(nodeRef);
|
||||
}
|
||||
@@ -626,7 +734,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -634,7 +744,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
set(nodeRef, readers, writers);
|
||||
}
|
||||
@@ -642,7 +754,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -650,7 +764,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -658,7 +774,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String>writers, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
@@ -666,7 +784,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
|
||||
/**
|
||||
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, boolean)
|
||||
*/
|
||||
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
@Override
|
||||
@Deprecated
|
||||
public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
|
||||
{
|
||||
remove(nodeRef);
|
||||
}
|
||||
|
@@ -29,14 +29,23 @@ package org.alfresco.module.org_alfresco_module_rm.test.legacy.service;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
|
||||
import org.alfresco.query.PagingRequest;
|
||||
import org.alfresco.query.PagingResults;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
|
||||
import org.alfresco.repo.site.SiteModel;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.security.AccessPermission;
|
||||
import org.alfresco.service.cmr.security.AccessStatus;
|
||||
import org.alfresco.service.cmr.security.AuthorityType;
|
||||
import org.alfresco.service.cmr.site.SiteService;
|
||||
import org.alfresco.service.cmr.site.SiteVisibility;
|
||||
import org.alfresco.util.GUID;
|
||||
@@ -73,8 +82,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
private String createTestUser()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>()
|
||||
{
|
||||
return doTestInTransaction(new Test<String>() {
|
||||
public String run()
|
||||
{
|
||||
String userName = GUID.generate();
|
||||
@@ -90,8 +98,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String elephant = createTestUser();
|
||||
final String snake = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run()
|
||||
{
|
||||
assertFalse(extendedSecurityService.hasExtendedSecurity(filePlan));
|
||||
@@ -118,7 +125,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
// test remove
|
||||
extendedSecurityService.remove(recordToo);
|
||||
|
||||
|
||||
assertFalse(extendedSecurityService.hasExtendedSecurity(recordToo));
|
||||
assertTrue(extendedSecurityService.getReaders(recordToo).isEmpty());
|
||||
assertTrue(extendedSecurityService.getWriters(recordToo).isEmpty());
|
||||
@@ -133,8 +140,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
final String monkey = createTestUser();
|
||||
final String elephant = createTestUser();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
|
||||
public Void run() throws Exception
|
||||
@@ -184,112 +190,337 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
|
||||
|
||||
public void testDifferentUsersDifferentPermissions()
|
||||
{
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
final String userNone = createTestUser();
|
||||
final String userRead = createTestUser();
|
||||
final String userWrite = createTestUser();
|
||||
final String siteShortName = GUID.generate();
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
|
||||
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
});
|
||||
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>()
|
||||
{
|
||||
final NodeRef record = doTestInTransaction(new Test<NodeRef>() {
|
||||
public NodeRef run() throws Exception
|
||||
{
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT)
|
||||
.getNodeRef();
|
||||
recordService.createRecord(filePlan, record);
|
||||
return record;
|
||||
}
|
||||
});
|
||||
|
||||
doTestInTransaction(new Test<Void>()
|
||||
{
|
||||
doTestInTransaction(new Test<Void>() {
|
||||
public Void run() throws Exception
|
||||
{
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userNone);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userRead);
|
||||
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>()
|
||||
{
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
AuthenticationUtil.runAs(new RunAsWork<Void>() {
|
||||
public Void doWork() throws Exception
|
||||
{
|
||||
// check permissions
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
|
||||
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
|
||||
return null;
|
||||
}
|
||||
}, userWrite);
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, true);
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
verifyCreatedGroups(documents, false);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
public void testConcurrentSetWithoutRetry()
|
||||
{
|
||||
Set<String> extendedReaders = new HashSet<>(2);
|
||||
Set<String> extendedWriters = new HashSet<>(2);
|
||||
|
||||
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
|
||||
|
||||
// For each record created previously, spawn a thread to set extended security so we cause concurrency
|
||||
// failure trying to create IPR groups with the same name.
|
||||
// Since there is no retry, we expect to get a ConcurrencyFailureException
|
||||
Assert.assertThrows(ConcurrencyFailureException.class, () -> {
|
||||
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, false);
|
||||
});
|
||||
|
||||
// Look for duplicated IPR groups and verify all documents have the same groups assigned
|
||||
// Since there was a ConcurrencyFailureException some threads failed to set extended security so some
|
||||
// documents may not have IPR groups created.
|
||||
verifyCreatedGroups(documents, true);
|
||||
|
||||
AuthenticationUtil.clearCurrentSecurityContext();
|
||||
}
|
||||
|
||||
private Set<NodeRef> setupConcurrentTestCase(int concurrentThreads, Set<String> extendedReaders, Set<String> extendedWriters)
|
||||
{
|
||||
final String usera = createTestUser();
|
||||
final String userb = createTestUser();
|
||||
final String owner = createTestUser();
|
||||
|
||||
extendedReaders.add(usera);
|
||||
extendedReaders.add(userb);
|
||||
extendedWriters.add(usera);
|
||||
extendedWriters.add(userb);
|
||||
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
|
||||
// Create a site
|
||||
NodeRef documentLib = createSite(new HashSet<>(), new HashSet<>());
|
||||
|
||||
// Create records in the site document library
|
||||
return createRecords(concurrentThreads, documentLib, owner);
|
||||
}
|
||||
|
||||
private NodeRef createSite(Set<String> readers, Set<String> writers)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<NodeRef>() {
|
||||
@Override
|
||||
public NodeRef execute() throws Throwable
|
||||
{
|
||||
final String siteShortName = GUID.generate();
|
||||
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
|
||||
readers.forEach(reader -> siteService.setMembership(siteShortName, reader, SiteModel.SITE_CONSUMER));
|
||||
writers.forEach(writer -> siteService.setMembership(siteShortName, writer, SiteModel.SITE_COLLABORATOR));
|
||||
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<NodeRef> createRecords(int numRecords, NodeRef parent, String owner)
|
||||
{
|
||||
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Set<NodeRef>>() {
|
||||
@Override
|
||||
public Set<NodeRef> execute() throws Throwable
|
||||
{
|
||||
int createdRecords = 0;
|
||||
Set<NodeRef> documents = new HashSet<>();
|
||||
while (createdRecords < numRecords)
|
||||
{
|
||||
final NodeRef doc = fileFolderService.create(parent, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
|
||||
ownableService.setOwner(doc, owner);
|
||||
recordService.createRecord(filePlan, doc, rmFolder, true);
|
||||
recordService.file(doc);
|
||||
recordService.complete(doc);
|
||||
documents.add(doc);
|
||||
createdRecords++;
|
||||
}
|
||||
return documents;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers, boolean useRetry)
|
||||
{
|
||||
if (!useRetry)
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return;
|
||||
}
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
setExtendedSecurity(doc, readers, writers);
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers)
|
||||
{
|
||||
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
|
||||
extendedSecurityService.set(doc, readers, writers);
|
||||
}
|
||||
|
||||
private void fireParallelExecutionOfSetExtendedSecurity(Set<NodeRef> documents, Set<String> extendedReaders, Set<String> extendedWriters, boolean useRetry)
|
||||
{
|
||||
CompletableFuture<?>[] futures = documents.stream()
|
||||
.map(doc -> CompletableFuture.runAsync(() -> setExtendedSecurity(doc, extendedReaders, extendedWriters, useRetry)))
|
||||
.toArray(CompletableFuture[]::new);
|
||||
|
||||
try
|
||||
{
|
||||
CompletableFuture.allOf(futures).join();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Throwable cause = e.getCause();
|
||||
if (cause instanceof ConcurrencyFailureException)
|
||||
{
|
||||
throw (ConcurrencyFailureException) cause;
|
||||
}
|
||||
throw new RuntimeException("Error during parallel execution", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void verifyCreatedGroups(Set<NodeRef> documents, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Set<String> expectedAuthorities = null;
|
||||
Set<Set<String>> errors = new HashSet<>();
|
||||
for (NodeRef doc : documents)
|
||||
{
|
||||
Set<AccessPermission> permissions = permissionService.getAllSetPermissions(doc);
|
||||
Set<String> authorities = getDocumentAuthorities(permissions);
|
||||
Set<String> authoritiesById = getAuthorityIds(authorities);
|
||||
|
||||
verifyIPRGroups(authorities, onlyDuplicatesValidation);
|
||||
|
||||
if (onlyDuplicatesValidation)
|
||||
{
|
||||
// Some documents may not have IPR groups created if there was a ConcurrencyFailureException
|
||||
continue;
|
||||
}
|
||||
|
||||
// All documents should have the same exact set of groups assigned
|
||||
if (expectedAuthorities == null)
|
||||
{
|
||||
expectedAuthorities = authoritiesById;
|
||||
}
|
||||
|
||||
if (!expectedAuthorities.equals(authoritiesById))
|
||||
{
|
||||
errors.add(authoritiesById);
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue("Unexpected authorities linked to document", errors.isEmpty());
|
||||
|
||||
return null;
|
||||
}
|
||||
}, false, true);
|
||||
}
|
||||
|
||||
private Set<String> getDocumentAuthorities(Set<AccessPermission> permissions)
|
||||
{
|
||||
Set<String> authorities = new HashSet<>();
|
||||
|
||||
for (AccessPermission accessPermission : permissions)
|
||||
{
|
||||
String authority = accessPermission.getAuthority();
|
||||
String authName = authorityService.getName(AuthorityType.GROUP, authority);
|
||||
authorities.add(authName);
|
||||
|
||||
}
|
||||
return authorities;
|
||||
}
|
||||
|
||||
private Set<String> getAuthorityIds(Set<String> authorities)
|
||||
{
|
||||
Set<String> authorityIds = new HashSet<>();
|
||||
for (String authority : authorities)
|
||||
{
|
||||
String authId = authorityService.getAuthorityNodeRef(authority) != null
|
||||
? authorityService.getAuthorityNodeRef(authority).getId()
|
||||
: null;
|
||||
authorityIds.add(authId);
|
||||
}
|
||||
return authorityIds;
|
||||
}
|
||||
|
||||
private void verifyIPRGroups(Set<String> authorities, boolean onlyDuplicatesValidation)
|
||||
{
|
||||
boolean hasGroupIPR = false;
|
||||
|
||||
for (String authorityName : authorities)
|
||||
{
|
||||
String shortName = authorityService.getShortName(authorityName);
|
||||
|
||||
if (authorityName.startsWith("GROUP_IPR"))
|
||||
{
|
||||
hasGroupIPR = true;
|
||||
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP, null, shortName, false,
|
||||
false, new PagingRequest(0, 10));
|
||||
|
||||
assertEquals("No duplicated IPR group expected", 1, results.getPage().size());
|
||||
}
|
||||
}
|
||||
|
||||
if (!onlyDuplicatesValidation)
|
||||
{
|
||||
assertTrue("No IPR Groups created", hasGroupIPR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -52,6 +52,7 @@ import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.model.RenditionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
|
||||
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
|
||||
@@ -67,6 +68,7 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
|
||||
import org.alfresco.service.cmr.repository.ChildAssociationRef;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.cmr.security.AccessPermission;
|
||||
import org.alfresco.service.cmr.security.AccessStatus;
|
||||
import org.alfresco.service.cmr.security.AuthorityService;
|
||||
@@ -522,6 +524,104 @@ public class ExtendedSecurityServiceImplUnitTest
|
||||
verify(mockedPermissionService).setPermission(nodeRef, writeGroup, RMPermissionModel.FILING, true);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a node with no previous IPR groups assigned
|
||||
* And having pre-existing IPR groups matching the ones we need
|
||||
* When I add some read and write authorities but with a different casing
|
||||
* Then the existing IPR groups are used
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test public void addExtendedSecurityWithMixedCasingUsernames()
|
||||
{
|
||||
// Have the usernames in the node as the correct usernames but with incorrect casing
|
||||
String user1 = "UseR";
|
||||
String user2 = "UseR_w";
|
||||
|
||||
// Incorrect IPR Group names
|
||||
Set<String> diffCasingReaders = Stream.of(user1, GROUP).collect(Collectors.toSet());
|
||||
Set<String> diffCasingWriters = Stream.of(user2, GROUP_W).collect(Collectors.toSet());
|
||||
String wrongReadGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(READER_GROUP_PREFIX, diffCasingReaders);
|
||||
String wrongWriteGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(WRITER_GROUP_PREFIX, diffCasingWriters);
|
||||
String wrongReadGroup = wrongReadGroupPrefix + "0";
|
||||
String wrongWriteGroup = wrongWriteGroupPrefix + "0";
|
||||
|
||||
// Correct Group names
|
||||
String correctReadGroup = readGroupPrefix + "0";
|
||||
String correctWriteGroup = writeGroupPrefix + "0";
|
||||
|
||||
// If queried for the correct groups, return the results
|
||||
PagingResults<String> mockedCorrectReadPResults = mock(PagingResults.class);
|
||||
PagingResults<String> mockedCorrectWritePResults = mock(PagingResults.class);
|
||||
when(mockedCorrectReadPResults.getPage())
|
||||
.thenReturn(Stream.of(GROUP_PREFIX + correctReadGroup).collect(Collectors.toList()));
|
||||
when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(readGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedCorrectReadPResults);
|
||||
|
||||
when(mockedCorrectWritePResults.getPage())
|
||||
.thenReturn(Stream.of(GROUP_PREFIX + correctWriteGroup).collect(Collectors.toList()));
|
||||
when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(writeGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedCorrectWritePResults);
|
||||
|
||||
// Don't return results for the incorrect groups (lenient as these may not be called with normalization enabled)
|
||||
PagingResults<String> mockedWrongReadPResults = mock(PagingResults.class);
|
||||
PagingResults<String> mockedWrongWritePResults = mock(PagingResults.class);
|
||||
lenient().when(mockedWrongReadPResults.getPage())
|
||||
.thenReturn(Collections.emptyList());
|
||||
lenient().when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(wrongReadGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedWrongReadPResults);
|
||||
|
||||
lenient().when(mockedWrongWritePResults.getPage())
|
||||
.thenReturn(Collections.emptyList());
|
||||
lenient().when(mockedAuthorityService.getAuthorities(
|
||||
eq(AuthorityType.GROUP),
|
||||
eq(RMAuthority.ZONE_APP_RM),
|
||||
eq(wrongWriteGroupPrefix),
|
||||
eq(false),
|
||||
eq(false),
|
||||
any(PagingRequest.class)))
|
||||
.thenReturn(mockedWrongWritePResults);
|
||||
|
||||
// The users do exist, despite being in a different casing and are able to be retrieved
|
||||
NodeRef noderefUser1 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER);
|
||||
when(mockedAuthorityService.authorityExists(user1)).thenReturn(true);
|
||||
when(mockedAuthorityService.getAuthorityNodeRef(user1)).thenReturn(noderefUser1);
|
||||
when(mockedNodeService.getProperty(noderefUser1, ContentModel.PROP_USERNAME)).thenReturn(USER);
|
||||
|
||||
NodeRef noderefUser2 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER_W);
|
||||
when(mockedAuthorityService.authorityExists(user2)).thenReturn(true);
|
||||
when(mockedAuthorityService.getAuthorityNodeRef(user2)).thenReturn(noderefUser2);
|
||||
when(mockedNodeService.getProperty(noderefUser2, ContentModel.PROP_USERNAME)).thenReturn(USER_W);
|
||||
|
||||
// Set the extended security service to normalize usernames
|
||||
extendedSecurityService.setEnableUsernameNormalization(true);
|
||||
extendedSecurityService.set(nodeRef, diffCasingReaders, diffCasingWriters);
|
||||
|
||||
// Verify that the incorrect read group is not created
|
||||
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongReadGroup, wrongReadGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
|
||||
// Verify that the incorrect write group is not created
|
||||
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongWriteGroup, wrongWriteGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
|
||||
|
||||
}
|
||||
|
||||
/**
* Given a node with no previous IPR groups assigned
@@ -571,7 +671,7 @@ public class ExtendedSecurityServiceImplUnitTest
.thenReturn(Stream
.of(USER_W, AlfMock.generateText())
.collect(Collectors.toSet()));

// add extended security
extendedSecurityService.set(nodeRef, READERS, WRITERS);

@@ -895,7 +995,7 @@ public class ExtendedSecurityServiceImplUnitTest
// group names
String readGroup = extendedSecurityService.getIPRGroupShortName(READER_GROUP_FULL_PREFIX, READERS, 0);
String writeGroup = extendedSecurityService.getIPRGroupShortName(WRITER_GROUP_FULL_PREFIX, WRITERS, 0);

// setup renditions
NodeRef renditionNodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_RECORD))
@@ -904,7 +1004,7 @@ public class ExtendedSecurityServiceImplUnitTest
.thenReturn(renditionNodeRef);
when(mockedNodeService.getChildAssocs(nodeRef, RenditionModel.ASSOC_RENDITION, RegexQNamePattern.MATCH_ALL))
.thenReturn(Collections.singletonList(mockedChildAssociationRef));

// setup permissions
Set<AccessPermission> permissions = Stream
.of(new AccessPermissionImpl(AlfMock.generateText(), AccessStatus.ALLOWED, readGroup, 0),
@@ -913,17 +1013,17 @@ public class ExtendedSecurityServiceImplUnitTest
.collect(Collectors.toSet());
when(mockedPermissionService.getAllSetPermissions(nodeRef))
.thenReturn(permissions);

// remove extended security
extendedSecurityService.remove(nodeRef);

// verify that the groups permissions have been removed
verify(mockedPermissionService).clearPermission(nodeRef, readGroup);
verify(mockedPermissionService).clearPermission(nodeRef, writeGroup);

// verify that the groups permissions have been removed from the rendition
verify(mockedPermissionService).clearPermission(renditionNodeRef, readGroup);
verify(mockedPermissionService).clearPermission(renditionNodeRef, writeGroup);
}
}
}
@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<build>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<modules>

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<properties>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<dependencies>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<properties>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<dependencies>

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>
</project>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<properties>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<modules>

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<modules>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<organization>

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<developers>

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<developers>

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<properties>

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<developers>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<properties>

pom.xml
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>

@@ -25,7 +25,7 @@
<properties>
<acs.version.major>23</acs.version.major>
<acs.version.minor>3</acs.version.minor>
<acs.version.revision>6</acs.version.revision>
<acs.version.revision>14</acs.version.revision>
<acs.version.label />
<amp.min.version>${acs.version.major}.0.0</amp.min.version>

@@ -154,7 +154,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>23.3.6.2</tag>
<tag>23.3.14.2</tag>
</scm>

<distributionManagement>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<dependencies>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.3.6.2</version>
<version>23.3.14.2</version>
</parent>

<dependencies>

@@ -2,93 +2,96 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
|
||||
/**
|
||||
* Add features action executor implementation.
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
{
|
||||
/**
|
||||
* Action constants
|
||||
*/
|
||||
public static final String NAME = "add-features";
|
||||
public static final String PARAM_ASPECT_NAME = "aspect-name";
|
||||
public static final String PARAM_CONSTRAINT = "ac-aspects";
|
||||
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
|
||||
/** Transaction Service, used for retrying operations */
|
||||
private TransactionService transactionService;
|
||||
|
||||
/**
|
||||
* Set the node service
|
||||
*
|
||||
* @param nodeService the node service
|
||||
*/
|
||||
public void setNodeService(NodeService nodeService)
|
||||
{
|
||||
this.nodeService = nodeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the transaction service
|
||||
*
|
||||
* @param transactionService the transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adhoc properties are allowed for this executor
|
||||
*/
|
||||
@Override
|
||||
protected boolean getAdhocPropertiesAllowed()
|
||||
{
|
||||
return true;
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.action.ParameterDefinitionImpl;
|
||||
import org.alfresco.repo.action.access.ActionAccessRestriction;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
|
||||
/**
|
||||
* Add features action executor implementation.
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
{
|
||||
/**
|
||||
* Action constants
|
||||
*/
|
||||
public static final String NAME = "add-features";
|
||||
public static final String PARAM_ASPECT_NAME = "aspect-name";
|
||||
public static final String PARAM_CONSTRAINT = "ac-aspects";
|
||||
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
|
||||
/** Transaction Service, used for retrying operations */
|
||||
private TransactionService transactionService;
|
||||
|
||||
/**
|
||||
* Set the node service
|
||||
*
|
||||
* @param nodeService
|
||||
* the node service
|
||||
*/
|
||||
public void setNodeService(NodeService nodeService)
|
||||
{
|
||||
this.nodeService = nodeService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the transaction service
|
||||
*
|
||||
* @param transactionService
|
||||
* the transaction service
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adhoc properties are allowed for this executor
|
||||
*/
|
||||
@Override
|
||||
protected boolean getAdhocPropertiesAllowed()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -96,55 +99,61 @@ public class AddFeaturesActionExecuter extends ActionExecuterAbstractBase
|
||||
*/
|
||||
public void executeImpl(final Action ruleAction, final NodeRef actionedUponNodeRef)
|
||||
{
|
||||
if (this.nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(
|
||||
new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
|
||||
QName aspectQName = null;
|
||||
|
||||
if(! nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
// Node has gone away, skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Build the aspect details
|
||||
Map<String, Serializable> paramValues = ruleAction.getParameterValues();
|
||||
for (Map.Entry<String, Serializable> entry : paramValues.entrySet())
|
||||
{
|
||||
if (entry.getKey().equals(PARAM_ASPECT_NAME) == true)
|
||||
{
|
||||
aspectQName = (QName)entry.getValue();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Must be an adhoc property
|
||||
QName propertyQName = QName.createQName(entry.getKey());
|
||||
Serializable propertyValue = entry.getValue();
|
||||
properties.put(propertyQName, propertyValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the aspect
|
||||
nodeService.addAspect(actionedUponNodeRef, aspectQName, properties);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see org.alfresco.repo.action.ParameterizedItemAbstractBase#addParameterDefinitions(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
protected void addParameterDefinitions(List<ParameterDefinition> paramList)
|
||||
{
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_ASPECT_NAME, DataTypeDefinition.QNAME, true, getParamDisplayLabel(PARAM_ASPECT_NAME), false, "ac-aspects"));
|
||||
}
|
||||
|
||||
}
|
||||
if (this.nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
transactionService.getRetryingTransactionHelper().doInTransaction(
|
||||
new RetryingTransactionCallback<Void>() {
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
|
||||
QName aspectQName = null;
|
||||
|
||||
if (!nodeService.exists(actionedUponNodeRef))
|
||||
{
|
||||
// Node has gone away, skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Build the aspect details
|
||||
Map<String, Serializable> paramValues = ruleAction.getParameterValues();
|
||||
removeActionContextParameter(paramValues);
|
||||
for (Map.Entry<String, Serializable> entry : paramValues.entrySet())
|
||||
{
|
||||
if (entry.getKey().equals(PARAM_ASPECT_NAME) == true)
|
||||
{
|
||||
aspectQName = (QName) entry.getValue();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Must be an adhoc property
|
||||
QName propertyQName = QName.createQName(entry.getKey());
|
||||
Serializable propertyValue = entry.getValue();
|
||||
properties.put(propertyQName, propertyValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the aspect
|
||||
nodeService.addAspect(actionedUponNodeRef, aspectQName, properties);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see org.alfresco.repo.action.ParameterizedItemAbstractBase#addParameterDefinitions(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
protected void addParameterDefinitions(List<ParameterDefinition> paramList)
|
||||
{
|
||||
paramList.add(new ParameterDefinitionImpl(PARAM_ASPECT_NAME, DataTypeDefinition.QNAME, true, getParamDisplayLabel(PARAM_ASPECT_NAME), false, "ac-aspects"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove actionContext from the parameter values to declassify as an adhoc property
|
||||
*/
|
||||
private void removeActionContextParameter(Map<String, Serializable> paramValues)
|
||||
{
|
||||
paramValues.remove(ActionAccessRestriction.ACTION_CONTEXT_PARAM_NAME);
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -37,6 +37,7 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.transform.config.CoreFunction;
import org.alfresco.util.PropertyCheck;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
@@ -46,6 +47,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.transform.common.RequestParamMap.DIRECT_ACCESS_URL;
@@ -68,6 +70,7 @@ public class LocalTransformClient implements TransformClient, InitializingBean
private ContentService contentService;
private RenditionService2Impl renditionService2;
private boolean directAccessUrlEnabled;
private int threadPoolSize;

private ExecutorService executorService;
private ThreadLocal<LocalTransform> transform = new ThreadLocal<>();
@@ -97,6 +100,11 @@ public class LocalTransformClient implements TransformClient, InitializingBean
this.directAccessUrlEnabled = directAccessUrlEnabled;
}

public void setThreadPoolSize(int threadPoolSize)
{
this.threadPoolSize = threadPoolSize;
}

public void setExecutorService(ExecutorService executorService)
{
this.executorService = executorService;
@@ -110,9 +118,11 @@ public class LocalTransformClient implements TransformClient, InitializingBean
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "renditionService2", renditionService2);
PropertyCheck.mandatory(this, "directAccessUrlEnabled", directAccessUrlEnabled);
PropertyCheck.mandatory(this, "threadPoolSize", threadPoolSize);
if (executorService == null)
{
executorService = Executors.newCachedThreadPool();
ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat("local-transform-%d").build();
executorService = Executors.newFixedThreadPool(threadPoolSize, threadFactory);
}
}

@@ -81,11 +81,19 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea

public static final QName DEFAULT_RENDITION_CONTENT_PROP = ContentModel.PROP_CONTENT;
public static final String DEFAULT_MIMETYPE = MimetypeMap.MIMETYPE_TEXT_PLAIN;
public static final String MIMETYPE_METADATA_EXTRACT = "alfresco-metadata-extract";
public static final String MIMETYPE_METADATA_EMBED = "alfresco-metadata-embed";
public static final String DEFAULT_ENCODING = "UTF-8";

public static final int SOURCE_HAS_NO_CONTENT = -1;
public static final int RENDITION2_DOES_NOT_EXIST = -2;

// Allowed mimetypes to support text or metadata extract transforms when thumbnails are disabled.
private static final Set<String> ALLOWED_MIMETYPES = Set.of(
MimetypeMap.MIMETYPE_TEXT_PLAIN,
MIMETYPE_METADATA_EXTRACT,
MIMETYPE_METADATA_EMBED);

private static Log logger = LogFactory.getLog(RenditionService2Impl.class);

// As Async transforms and renditions are so similar, this class provides a way to provide the code that is different.
@@ -288,7 +296,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
{
try
{
if (!isEnabled())
if (!isAsyncAllowed(renderOrTransform))
{
throw new RenditionService2Exception("Async transforms and renditions are disabled " +
"(system.thumbnail.generate=false or renditionService2.enabled=false).");
@@ -967,4 +975,23 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
}
}
}

// Checks if the given transform callback is a text extract transform for content indexing or metadata extract/embed.
private boolean isTextOrMetadataExtractTransform(RenderOrTransformCallBack renderOrTransform)
{
RenditionDefinition2 renditionDefinition = renderOrTransform.getRenditionDefinition();
return renditionDefinition != null && ALLOWED_MIMETYPES.contains(renditionDefinition.getTargetMimetype());
}

private boolean isAsyncAllowed(RenderOrTransformCallBack renderOrTransform)
{
// If enabled is false, all async transforms/renditions must be blocked
if (!enabled)
{
return false;
}

// If thumbnails are disabled, allow only text extract or metadata extract/embed transforms
return thumbnailsEnabled || isTextOrMetadataExtractTransform(renderOrTransform);
}
}

@@ -82,6 +82,7 @@
<property name="contentService" ref="contentService" />
<property name="renditionService2" ref="renditionService2" />
<property name="directAccessUrlEnabled" value="${local.transform.directAccessUrl.enabled}"/>
<property name="threadPoolSize" value="${local.transform.threadPoolSize}" />
</bean>

<bean id="synchronousTransformClient" parent="localSynchronousTransformClient" />

@@ -1351,6 +1351,9 @@ restApi.directAccessUrl.defaultExpiryTimeInSec=30
# Controls whether direct access url URLs may be used in transforms.
local.transform.directAccessUrl.enabled=true

# Controls size of thread pool used for transforms.
local.transform.threadPoolSize=8

# Creates additional indexes on alf_node and alf_transaction. Recommended for large repositories.
system.new-node-transaction-indexes.ignored=true

@@ -1,164 +1,177 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.action.ActionImpl;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationComponent;
|
||||
import org.alfresco.service.cmr.action.ActionDefinition;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.BaseSpringTest;
|
||||
import org.alfresco.util.GUID;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.extensions.surf.util.I18NUtil;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
/**
|
||||
* Add features action execution test
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
@Transactional
|
||||
public class AddFeaturesActionExecuterTest extends BaseSpringTest
|
||||
{
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
|
||||
/**
|
||||
* The store reference
|
||||
*/
|
||||
private StoreRef testStoreRef;
|
||||
|
||||
/**
|
||||
* The root node reference
|
||||
*/
|
||||
private NodeRef rootNodeRef;
|
||||
|
||||
/**
|
||||
* The test node reference
|
||||
*/
|
||||
private NodeRef nodeRef;
|
||||
|
||||
/**
|
||||
* The add features action executer
|
||||
*/
|
||||
private AddFeaturesActionExecuter executer;
|
||||
|
||||
/**
|
||||
* Id used to identify the test action created
|
||||
*/
|
||||
private final static String ID = GUID.generate();
|
||||
|
||||
/**
|
||||
* Called at the begining of all tests
|
||||
*/
|
||||
@Before
|
||||
public void before() throws Exception
|
||||
{
|
||||
this.nodeService = (NodeService)this.applicationContext.getBean("nodeService");
|
||||
|
||||
AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
|
||||
authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName());
|
||||
|
||||
// Create the store and get the root node
|
||||
this.testStoreRef = this.nodeService.createStore(
|
||||
StoreRef.PROTOCOL_WORKSPACE, "Test_"
|
||||
+ System.currentTimeMillis());
|
||||
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
|
||||
|
||||
// Create the node used for tests
|
||||
this.nodeRef = this.nodeService.createNode(
|
||||
this.rootNodeRef,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
QName.createQName("{test}testnode"),
|
||||
ContentModel.TYPE_CONTENT).getChildRef();
|
||||
|
||||
// Get the executer instance
|
||||
this.executer = (AddFeaturesActionExecuter)this.applicationContext.getBean(AddFeaturesActionExecuter.NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test execution
|
||||
*/
|
||||
@Test
|
||||
public void testExecution()
|
||||
{
|
||||
// Check that the node does not have the classifiable aspect
|
||||
assertFalse(this.nodeService.hasAspect(this.nodeRef, ContentModel.ASPECT_CLASSIFIABLE));
|
||||
|
||||
// Execute the action
|
||||
ActionImpl action = new ActionImpl(null, ID, AddFeaturesActionExecuter.NAME, null);
|
||||
action.setParameterValue(AddFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_CLASSIFIABLE);
|
||||
this.executer.execute(action, this.nodeRef);
|
||||
|
||||
// Check that the node now has the classifiable aspect applied
|
||||
assertTrue(this.nodeService.hasAspect(this.nodeRef, ContentModel.ASPECT_CLASSIFIABLE));
|
||||
}
|
||||
|
||||
/**
|
||||
* MNT-15802
|
||||
*/
|
||||
@Test
|
||||
public void testCheckLocalizedParamDefintionWithConstraint()
|
||||
{
|
||||
// test for other than default locale
|
||||
I18NUtil.setLocale(Locale.GERMAN);
|
||||
|
||||
ActionDefinition actionDef = executer.getActionDefinition();
|
||||
|
||||
List<ParameterDefinition> paramDef = actionDef.getParameterDefinitions();
|
||||
assertNotNull(paramDef);
|
||||
|
||||
String constraintName = paramDef.get(0).getParameterConstraintName();
|
||||
assertNotNull(constraintName);
|
||||
assertEquals(AddFeaturesActionExecuter.PARAM_CONSTRAINT, constraintName);
|
||||
|
||||
// test for other than default locale
|
||||
I18NUtil.setLocale(Locale.ITALY);
|
||||
|
||||
actionDef = executer.getActionDefinition();
|
||||
|
||||
paramDef = actionDef.getParameterDefinitions();
|
||||
assertNotNull(paramDef);
|
||||
|
||||
constraintName = paramDef.get(0).getParameterConstraintName();
|
||||
assertNotNull(constraintName);
|
||||
assertEquals(AddFeaturesActionExecuter.PARAM_CONSTRAINT, constraintName);
|
||||
|
||||
I18NUtil.setLocale(Locale.getDefault());
|
||||
|
||||
}
|
||||
}
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.repo.action.executer;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.extensions.surf.util.I18NUtil;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.action.ActionImpl;
|
||||
import org.alfresco.repo.action.access.ActionAccessRestriction;
|
||||
import org.alfresco.repo.security.authentication.AuthenticationComponent;
|
||||
import org.alfresco.service.cmr.action.ActionDefinition;
|
||||
import org.alfresco.service.cmr.action.ParameterDefinition;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.NodeService;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.BaseSpringTest;
|
||||
import org.alfresco.util.GUID;
|
||||
|
||||
/**
|
||||
* Add features action execution test
|
||||
*
|
||||
* @author Roy Wetherall
|
||||
*/
|
||||
@Transactional
|
||||
public class AddFeaturesActionExecuterTest extends BaseSpringTest
|
||||
{
|
||||
/**
|
||||
* Id used to identify the test action created
|
||||
*/
|
||||
private final static String ID = GUID.generate();
|
||||
/**
|
||||
* The node service
|
||||
*/
|
||||
private NodeService nodeService;
|
||||
/**
|
||||
* The store reference
|
||||
*/
|
||||
private StoreRef testStoreRef;
|
||||
/**
|
||||
* The root node reference
|
||||
*/
|
||||
private NodeRef rootNodeRef;
|
||||
/**
|
||||
* The test node reference
|
||||
*/
|
||||
private NodeRef nodeRef;
|
||||
/**
|
||||
* The add features action executer
|
||||
*/
|
||||
private AddFeaturesActionExecuter executer;
|
||||
|
||||
/**
|
||||
* Called at the begining of all tests
|
||||
*/
|
||||
@Before
|
||||
public void before() throws Exception
|
||||
{
|
||||
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
|
||||
|
||||
AuthenticationComponent authenticationComponent = (AuthenticationComponent) applicationContext.getBean("authenticationComponent");
|
||||
authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName());
|
||||
|
||||
// Create the store and get the root node
|
||||
this.testStoreRef = this.nodeService.createStore(
|
||||
StoreRef.PROTOCOL_WORKSPACE, "Test_"
|
||||
+ System.currentTimeMillis());
|
||||
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
|
||||
|
||||
// Create the node used for tests
|
||||
this.nodeRef = this.nodeService.createNode(
|
||||
this.rootNodeRef,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
QName.createQName("{test}testnode"),
|
||||
ContentModel.TYPE_CONTENT).getChildRef();
|
||||
|
||||
// Get the executer instance
|
||||
this.executer = (AddFeaturesActionExecuter) this.applicationContext.getBean(AddFeaturesActionExecuter.NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test execution
|
||||
*/
|
||||
@Test
|
||||
public void testExecution()
|
||||
{
|
||||
// Check that the node does not have the classifiable aspect
|
||||
assertFalse(this.nodeService.hasAspect(this.nodeRef, ContentModel.ASPECT_CLASSIFIABLE));
|
||||
|
||||
// Execute the action
|
||||
ActionImpl action = new ActionImpl(null, ID, AddFeaturesActionExecuter.NAME, null);
|
||||
action.setParameterValue(AddFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_CLASSIFIABLE);
|
||||
this.executer.execute(action, this.nodeRef);
|
||||
|
||||
// Check that the node now has the classifiable aspect applied
|
||||
assertTrue(this.nodeService.hasAspect(this.nodeRef, ContentModel.ASPECT_CLASSIFIABLE));
|
||||
}
|
||||
|
||||
/**
|
||||
* MNT-15802
|
||||
*/
|
||||
@Test
|
||||
public void testCheckLocalizedParamDefintionWithConstraint()
|
||||
{
|
||||
// test for other than default locale
|
||||
I18NUtil.setLocale(Locale.GERMAN);
|
||||
|
||||
ActionDefinition actionDef = executer.getActionDefinition();
|
||||
|
||||
List<ParameterDefinition> paramDef = actionDef.getParameterDefinitions();
|
||||
assertNotNull(paramDef);
|
||||
|
||||
String constraintName = paramDef.get(0).getParameterConstraintName();
|
||||
assertNotNull(constraintName);
|
||||
assertEquals(AddFeaturesActionExecuter.PARAM_CONSTRAINT, constraintName);
|
||||
|
||||
// test for other than default locale
|
||||
I18NUtil.setLocale(Locale.ITALY);
|
||||
|
||||
actionDef = executer.getActionDefinition();
|
||||
|
||||
paramDef = actionDef.getParameterDefinitions();
|
||||
assertNotNull(paramDef);
|
||||
|
||||
constraintName = paramDef.get(0).getParameterConstraintName();
|
||||
assertNotNull(constraintName);
|
||||
assertEquals(AddFeaturesActionExecuter.PARAM_CONSTRAINT, constraintName);
|
||||
|
||||
I18NUtil.setLocale(Locale.getDefault());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test check actionContext param is removed from adhoc properties
|
||||
*/
|
||||
@Test
|
||||
public void testCheckActionContext()
|
||||
{
|
||||
// Execute the action
|
||||
ActionImpl action = new ActionImpl(null, ID, AddFeaturesActionExecuter.NAME, null);
|
||||
action.setParameterValue(ActionAccessRestriction.ACTION_CONTEXT_PARAM_NAME, ActionAccessRestriction.V1_ACTION_CONTEXT);
|
||||
action.setParameterValue(AddFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_CLASSIFIABLE);
|
||||
this.executer.execute(action, this.nodeRef);
|
||||
|
||||
// Ensure the actionContext parameter has been removed
|
||||
assertFalse(nodeService.getProperties(this.nodeRef).containsKey(QName.createQName(ActionAccessRestriction.ACTION_CONTEXT_PARAM_NAME)));
|
||||
}
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Repository
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
@@ -34,6 +34,10 @@ import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.action.ActionImpl;
|
||||
@@ -44,7 +48,6 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.ServiceRegistry;
|
||||
import org.alfresco.service.cmr.action.Action;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
@@ -53,15 +56,13 @@ import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.GUID;
|
||||
import org.alfresco.util.test.junitrules.ApplicationContextInit;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* This class contains tests for {@link ImporterActionExecuter}.
|
||||
*
|
||||
* @author abalmus
|
||||
*/
|
||||
@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
|
||||
public class ImporterActionExecuterTest
|
||||
{
|
||||
// Rule to initialise the default Alfresco spring configuration
|
||||
@@ -87,8 +88,7 @@ public class ImporterActionExecuterTest
|
||||
AuthenticationUtil.setRunAsUserSystem();
|
||||
|
||||
// we need a store
|
||||
storeRef = serviceRegistry.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<StoreRef>()
|
||||
{
|
||||
storeRef = serviceRegistry.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<StoreRef>() {
|
||||
public StoreRef execute()
|
||||
{
|
||||
StoreRef storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, "Test_" + System.nanoTime());
|
||||
@@ -102,8 +102,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
try
|
||||
{
|
||||
serviceRegistry.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
serviceRegistry.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
if (storeRef != null)
|
||||
@@ -125,8 +124,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
final RetryingTransactionHelper retryingTransactionHelper = serviceRegistry.getRetryingTransactionHelper();
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
@@ -162,8 +160,7 @@ public class ImporterActionExecuterTest
|
||||
}
|
||||
|
||||
/**
|
||||
* MNT-16292: Unzipped files which have folders do not get the cm:titled
|
||||
* aspect applied
|
||||
* MNT-16292: Unzipped files which have folders do not get the cm:titled aspect applied
|
||||
*
|
||||
* @throws IOException
|
||||
*/
|
||||
@@ -172,8 +169,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
final RetryingTransactionHelper retryingTransactionHelper = serviceRegistry.getRetryingTransactionHelper();
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
@@ -190,7 +186,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
importerActionExecuter.execute(action, zipFileNodeRef);
|
||||
|
||||
// check if import succeeded
|
||||
// check if import succeeded
|
||||
NodeRef importedFolder = nodeService.getChildByName(targetFolderNodeRef, ContentModel.ASSOC_CONTAINS, "folderCmTitledAspectArchive");
|
||||
assertNotNull("import action failed", importedFolder);
|
||||
|
||||
@@ -199,7 +195,7 @@ public class ImporterActionExecuterTest
|
||||
assertTrue("folder didn't get the cm:titled aspect applied", hasAspectTitled);
|
||||
|
||||
// MNT-17017 check ContentModel.PROP_TITLE is not set on the top level folder, just like Share
|
||||
String title = (String)nodeService.getProperty(importedFolder, ContentModel.PROP_TITLE);
|
||||
String title = (String) nodeService.getProperty(importedFolder, ContentModel.PROP_TITLE);
|
||||
assertNull("The title should not have cm:title set", title);
|
||||
}
|
||||
finally
|
||||
@@ -224,8 +220,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
final RetryingTransactionHelper retryingTransactionHelper = serviceRegistry.getRetryingTransactionHelper();
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
@@ -270,8 +265,7 @@ public class ImporterActionExecuterTest
|
||||
{
|
||||
final RetryingTransactionHelper retryingTransactionHelper = serviceRegistry.getRetryingTransactionHelper();
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
public Void execute()
|
||||
{
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
@@ -306,11 +300,51 @@ public class ImporterActionExecuterTest
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUnzipZipFileHavingAccentCharInFolderName() throws IOException
|
||||
{
|
||||
final RetryingTransactionHelper retryingTransactionHelper = serviceRegistry.getRetryingTransactionHelper();
|
||||
|
||||
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
|
||||
@Override
|
||||
public Void execute() throws Throwable
|
||||
|
||||
{
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
|
||||
// create test data
|
||||
NodeRef zipFileNodeRef = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, ContentModel.ASSOC_CHILDREN, ContentModel.TYPE_CONTENT).getChildRef();
|
||||
NodeRef targetFolderNodeRef = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, ContentModel.ASSOC_CHILDREN, ContentModel.TYPE_FOLDER).getChildRef();
|
||||
|
||||
putContent(zipFileNodeRef, "import-archive-test/accentCharTestZip.zip");
|
||||
|
||||
Action action = createAction(zipFileNodeRef, "ImporterActionExecuterTestActionDefinition", targetFolderNodeRef);
|
||||
|
||||
try
|
||||
{
|
||||
importerActionExecuter.setUncompressedBytesLimit("100000");
|
||||
importerActionExecuter.execute(action, zipFileNodeRef);
|
||||
NodeRef importedFolder = nodeService.getChildByName(targetFolderNodeRef, ContentModel.ASSOC_CONTAINS, "accentCharTestZip");
|
||||
assertNotNull("unzip action failed", importedFolder);
|
||||
assertTrue("multiple folder structure created", nodeService.getChildAssocs(importedFolder).size() == 1);
|
||||
}
|
||||
finally
|
||||
{
|
||||
// clean test data
|
||||
nodeService.deleteNode(targetFolderNodeRef);
|
||||
nodeService.deleteNode(zipFileNodeRef);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void putContent(NodeRef zipFileNodeRef, String resource)
|
||||
{
|
||||
URL url = AbstractContentTransformerTest.class.getClassLoader().getResource(resource);
|
||||
final File file = new File(url.getFile());
|
||||
|
||||
|
||||
ContentWriter writer = contentService.getWriter(zipFileNodeRef, ContentModel.PROP_CONTENT, true);
|
||||
writer.setMimetype(MimetypeMap.MIMETYPE_ZIP);
|
||||
writer.putContent(file);
|
||||
|
@@ -26,6 +26,12 @@
|
||||
|
||||
package org.alfresco.repo.event2;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import static org.alfresco.model.ContentModel.PROP_DESCRIPTION;
|
||||
|
||||
import java.io.Serializable;
|
||||
@@ -35,6 +41,9 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.content.MimetypeMap;
|
||||
import org.alfresco.repo.dictionary.M2Model;
|
||||
@@ -53,7 +62,6 @@ import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.util.GUID;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* @author Iulian Aftene
|
||||
@@ -66,20 +74,20 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
public void testUpdateNodeResourceContent()
|
||||
{
|
||||
ContentService contentService = (ContentService) applicationContext.getBean(
|
||||
"contentService");
|
||||
"contentService");
|
||||
|
||||
final NodeRef nodeRef = createNode(ContentModel.TYPE_CONTENT);
|
||||
|
||||
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(),
|
||||
resultRepoEvent.getType());
|
||||
resultRepoEvent.getType());
|
||||
|
||||
NodeResource resource = getNodeResource(resultRepoEvent);
|
||||
assertNull("Content should have been null.", resource.getContent());
|
||||
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.TYPE_CONTENT,
|
||||
true);
|
||||
true);
|
||||
writer.setMimetype(MimetypeMap.MIMETYPE_PDF);
|
||||
writer.setEncoding("UTF-8");
|
||||
writer.putContent("test content.");
|
||||
@@ -90,7 +98,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
|
||||
resultRepoEvent = getRepoEvent(2);
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
|
||||
resultRepoEvent.getType());
|
||||
resultRepoEvent.getType());
|
||||
|
||||
resource = getNodeResource(resultRepoEvent);
|
||||
ContentInfo content = resource.getContent();
|
||||
@@ -105,7 +113,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
// Update the content again
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.TYPE_CONTENT,
|
||||
true);
|
||||
true);
|
||||
writer.setMimetype(MimetypeMap.MIMETYPE_PDF);
|
||||
writer.setEncoding("UTF-8");
|
||||
writer.putContent("A quick brown fox jumps over the lazy dog.");
|
||||
@@ -370,7 +378,6 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
assertEquals("new test title", title);
|
||||
assertEquals("new test title", getLocalizedProperty(resource, "cm:title", defaultLocale));
|
||||
|
||||
|
||||
resourceBefore = getNodeResourceBefore(3);
|
||||
title = getProperty(resourceBefore, "cm:title");
|
||||
assertEquals("Wrong old property.", "test title", title);
|
||||
@@ -490,14 +497,14 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
resource = getNodeResource(2);
|
||||
assertNotNull(resource.getAspectNames());
|
||||
assertTrue(resource.getAspectNames().contains("cm:versionable"));
|
||||
//Check all aspects
|
||||
// Check all aspects
|
||||
Set<String> expectedAspects = new HashSet<>(originalAspects);
|
||||
expectedAspects.add("cm:versionable");
|
||||
assertEquals(expectedAspects, resource.getAspectNames());
|
||||
// Check properties
|
||||
assertFalse(resource.getProperties().isEmpty());
|
||||
|
||||
//Check resourceBefore
|
||||
// Check resourceBefore
|
||||
NodeResource resourceBefore = getNodeResourceBefore(2);
|
||||
assertNotNull(resourceBefore.getAspectNames());
|
||||
assertEquals(originalAspects, resourceBefore.getAspectNames());
|
||||
@@ -544,21 +551,64 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
assertEquals(aspectsBeforeRemove, resourceBefore.getAspectNames());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveAspectPropertiesTest()
|
||||
{
|
||||
final NodeRef nodeRef = createNode(ContentModel.TYPE_CONTENT);
|
||||
NodeResource resource = getNodeResource(1);
|
||||
final Set<String> originalAspects = resource.getAspectNames();
|
||||
assertNotNull(originalAspects);
|
||||
|
||||
// Add cm:geographic aspect with properties
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
Map<QName, Serializable> aspectProperties = new HashMap<>();
|
||||
aspectProperties.put(ContentModel.PROP_LATITUDE, "12.345678");
|
||||
aspectProperties.put(ContentModel.PROP_LONGITUDE, "12.345678");
|
||||
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, aspectProperties);
|
||||
return null;
|
||||
});
|
||||
resource = getNodeResource(2);
|
||||
Set<String> aspectsBeforeRemove = resource.getAspectNames();
|
||||
assertNotNull(aspectsBeforeRemove);
|
||||
assertTrue(aspectsBeforeRemove.contains("cm:geographic"));
|
||||
|
||||
// Remove cm:geographic aspect - this automatically removes the properties from the node
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
nodeService.removeAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC);
|
||||
return null;
|
||||
});
|
||||
|
||||
resource = getNodeResource(3);
|
||||
assertEquals(originalAspects, resource.getAspectNames());
|
||||
|
||||
NodeResource resourceBefore = getNodeResourceBefore(3);
|
||||
assertNotNull(resourceBefore.getAspectNames());
|
||||
assertEquals(aspectsBeforeRemove, resourceBefore.getAspectNames());
|
||||
// Resource before should contain cm:latitude and cm:longitude properties
|
||||
assertNotNull(resourceBefore.getProperties());
|
||||
assertTrue(resourceBefore.getProperties().containsKey("cm:latitude"));
|
||||
assertTrue(resourceBefore.getProperties().containsKey("cm:longitude"));
|
||||
// Resource after should NOT contain cm:latitude and cm:longitude properties
|
||||
assertNotNull(resource.getProperties());
|
||||
assertFalse(resource.getProperties().containsKey("cm:latitude"));
|
||||
assertFalse(resource.getProperties().containsKey("cm:longitude"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateAndUpdateInTheSameTransaction()
|
||||
{
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
|
||||
NodeRef node1 = nodeService.createNode(
|
||||
rootNodeRef,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
QName.createQName(TEST_NAMESPACE, GUID.generate()),
|
||||
ContentModel.TYPE_CONTENT).getChildRef();
|
||||
rootNodeRef,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
QName.createQName(TEST_NAMESPACE, GUID.generate()),
|
||||
ContentModel.TYPE_CONTENT).getChildRef();
|
||||
|
||||
nodeService.setProperty(node1, PROP_DESCRIPTION, "test description");
|
||||
return null;
|
||||
});
|
||||
//Create and update node are done in the same transaction so one event is expected
|
||||
// Create and update node are done in the same transaction so one event is expected
|
||||
// to be generated
|
||||
checkNumOfEvents(1);
|
||||
}
|
||||
@@ -593,8 +643,8 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
assertEquals("Incorrect node type was found", "cm:folder", nodeResource.getNodeType());
|
||||
|
||||
NodeResource resourceBefore = getNodeResourceBefore(2);
|
||||
assertEquals("Incorrect node type was found","cm:content", resourceBefore.getNodeType());
|
||||
// assertNotNull(resourceBefore.getModifiedAt()); uncomment this when the issue will be fixed
|
||||
assertEquals("Incorrect node type was found", "cm:content", resourceBefore.getNodeType());
|
||||
// assertNotNull(resourceBefore.getModifiedAt()); uncomment this when the issue will be fixed
|
||||
assertNull(resourceBefore.getId());
|
||||
assertNull(resourceBefore.getContent());
|
||||
assertNull(resourceBefore.isFile());
|
||||
@@ -624,8 +674,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
m2Type.setTitle("Test type title");
|
||||
|
||||
// Create active model
|
||||
CustomModelDefinition modelDefinition =
|
||||
retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
|
||||
CustomModelDefinition modelDefinition = retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
|
||||
|
||||
assertNotNull(modelDefinition);
|
||||
assertEquals(modelName, modelDefinition.getName().getLocalName());
|
||||
@@ -655,7 +704,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
|
||||
assertEquals("cm:content node type was not found", "cm:content", nodeResource.getNodeType());
|
||||
|
||||
QName typeQName = QName.createQName("{" + namespacePair.getFirst()+ "}" + typeName);
|
||||
QName typeQName = QName.createQName("{" + namespacePair.getFirst() + "}" + typeName);
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
nodeService.setType(nodeRef, typeQName);
|
||||
|
||||
@@ -757,7 +806,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
// we should have only 1 event, node.Created
|
||||
checkNumOfEvents(1);
|
||||
|
||||
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
|
||||
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
|
||||
NodeResource nodeResource = getNodeResource(resultRepoEvent);
|
||||
assertEquals("Incorrect node type was found", "cm:folder", nodeResource.getNodeType());
|
||||
@@ -783,10 +832,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
nodeService.moveNode(
|
||||
moveFile,
|
||||
folder2,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(TEST_NAMESPACE));
|
||||
moveFile,
|
||||
folder2,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(TEST_NAMESPACE));
|
||||
return null;
|
||||
});
|
||||
|
||||
@@ -801,7 +850,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
assertEquals("Wrong node parent.", folder1ID, moveFileParentBeforeMove);
|
||||
assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
|
||||
getRepoEvent(4).getType());
|
||||
getRepoEvent(4).getType());
|
||||
|
||||
assertNull(resourceBefore.getId());
|
||||
assertNull(resourceBefore.getName());
|
||||
@@ -833,10 +882,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
|
||||
retryingTransactionHelper.doInTransaction(() -> {
|
||||
nodeService.moveNode(
|
||||
moveFolder,
|
||||
grandParent,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(TEST_NAMESPACE));
|
||||
moveFolder,
|
||||
grandParent,
|
||||
ContentModel.ASSOC_CONTAINS,
|
||||
QName.createQName(TEST_NAMESPACE));
|
||||
return null;
|
||||
});
|
||||
|
||||
@@ -845,15 +894,13 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
|
||||
final String grandParentID = getNodeResource(1).getId();
|
||||
final String parentID = getNodeResource(2).getId();
|
||||
|
||||
final String moveFolderParentBeforeMove =
|
||||
getNodeResourceBefore(4).getPrimaryHierarchy().get(0);
|
||||
final String moveFolderParentAfterMove =
|
||||
getNodeResource(4).getPrimaryHierarchy().get(0);
|
||||
final String moveFolderParentBeforeMove = getNodeResourceBefore(4).getPrimaryHierarchy().get(0);
|
||||
final String moveFolderParentAfterMove = getNodeResource(4).getPrimaryHierarchy().get(0);
|
||||
|
||||
assertEquals("Wrong node parent.", parentID, moveFolderParentBeforeMove);
|
||||
assertEquals("Wrong node parent.", grandParentID, moveFolderParentAfterMove);
|
||||
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
|
||||
getRepoEventWithoutWait(4).getType());
|
||||
getRepoEventWithoutWait(4).getType());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -867,28 +914,25 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent

retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
grandParent,
root2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});

checkNumOfEvents(6);

final String root2ID = getNodeResource(2).getId();
final String grandParentParentAfterMove =
getNodeResource(6).getPrimaryHierarchy().get(0);
final String grandParentParentAfterMove = getNodeResource(6).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", root2ID, grandParentParentAfterMove);

final String grandParentID = getNodeResource(3).getId();
final String parentIDOfTheParentFolder =
getNodeResource(4).getPrimaryHierarchy().get(0);
final String parentIDOfTheParentFolder = getNodeResource(4).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", grandParentID, parentIDOfTheParentFolder);

final String parentID = getNodeResource(4).getId();
final String contentParentID =
getNodeResource(5).getPrimaryHierarchy().get(0);
final String contentParentID = getNodeResource(5).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", parentID, contentParentID);
}
@@ -906,10 +950,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent

retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -918,8 +962,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertTrue("Wrong aspect.", resource.getAspectNames().contains("cm:versionable"));

final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(5).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(5).getPrimaryHierarchy().get(0);

assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -935,10 +978,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
nodeService.setProperty(moveFile, ContentModel.PROP_NAME, "test_new_name");

nodeService.moveNode(
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -946,8 +989,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("test_new_name", resource.getName());

final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(4).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(4).getPrimaryHierarchy().get(0);

assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -958,28 +1000,28 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {

NodeRef folder1 = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();

NodeRef folder2 = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();

NodeRef fileToMove = nodeService.createNode(
folder1,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_CONTENT).getChildRef();

nodeService.moveNode(
fileToMove,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));

assertEquals(folder2, nodeService.getPrimaryParent(fileToMove).getParentRef());
@@ -989,8 +1031,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(3);

final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(3).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(3).getPrimaryHierarchy().get(0);

assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -1003,7 +1044,6 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
final Set<String> originalAspects = resource.getAspectNames();
assertNotNull(originalAspects);

retryingTransactionHelper.doInTransaction(() -> {
// Add cm:geographic aspect with default value
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, null);
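Note (editorial, not part of the diff): the UpdateRepoEventIT hunks above are formatting-only (wrapped argument lists joined or re-indented); the behaviour under test is unchanged. The sketch below condenses the pattern those hunks exercise into one place. It is illustrative only and assumes the fixtures and helpers used throughout this test class (moveFile, folder2, folder1ID, folder2ID, nodeService, retryingTransactionHelper, getRepoEvent, getNodeResource, getNodeResourceBefore).

// Illustrative sketch - not part of the diff. Event index 4 is assumed to be the
// node.Updated event raised by the move in the scenario asserted above.
retryingTransactionHelper.doInTransaction(() -> {
    // moving a node between folders is what triggers the update event
    nodeService.moveNode(moveFile, folder2, ContentModel.ASSOC_CONTAINS, QName.createQName(TEST_NAMESPACE));
    return null;
});

// the move is reported as node.Updated; resourceBefore carries the old primary parent
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(), getRepoEvent(4).getType());
final String parentBeforeMove = getNodeResourceBefore(4).getPrimaryHierarchy().get(0);
final String parentAfterMove = getNodeResource(4).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", folder1ID, parentBeforeMove);
assertEquals("Wrong node parent.", folder2ID, parentAfterMove);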
@@ -39,6 +39,7 @@ import org.junit.Test;

import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.permissions.AccessDeniedException;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
@@ -717,4 +718,57 @@ public class RenditionService2IntegrationTest extends AbstractRenditionIntegrati
renditionService2.setEnabled(true);
}
}

@Test
public void testTextExtractTransformAllowedWhenThumbnailDisabled()
{
// create a source node
NodeRef sourceNodeRef = createSource(ADMIN, "quick.pdf");
assertNotNull("Node not generated", sourceNodeRef);
String replyQueue = "org.test.queue";
String targetMimetype = MimetypeMap.MIMETYPE_TEXT_PLAIN;

TransformDefinition textExtractTransform = new TransformDefinition(
targetMimetype,
java.util.Collections.emptyMap(),
"clientData",
replyQueue,
"requestId");

renditionService2.setThumbnailsEnabled(false);
try
{
// Should NOT throw, as this is a text extract transform
AuthenticationUtil.runAs(() -> {
transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
renditionService2.transform(sourceNodeRef, textExtractTransform);
return null;
});
return null;
}, ADMIN);
}
finally
{
renditionService2.setThumbnailsEnabled(true);
}
}

@Test
public void testMetadataExtractTransformAllowedWhenThumbnailDisabled()
{
// create a source node
NodeRef sourceNodeRef = createSource(ADMIN, "quick.pdf");
assertNotNull("Node not generated", sourceNodeRef);
renditionService2.setThumbnailsEnabled(false);
try
{
// Should NOT throw, as this is a metadata extract transform
extract(ADMIN, sourceNodeRef);
waitForExtract(ADMIN, sourceNodeRef, true);
}
finally
{
renditionService2.setThumbnailsEnabled(true);
}
}
}
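Note (editorial, not part of the diff): the two tests added above pin down that renditionService2.setThumbnailsEnabled(false) only suppresses thumbnail renditions; a transform whose target is plain text (a text extract) and a metadata extract are still expected to run. The condensed sketch below restates that guard pattern. It is illustrative only and assumes the same helpers as the tests (createSource, ADMIN, transactionService) and the TransformDefinition constructor arguments shown in the diff (target mimetype, transform options, client data, reply queue, request id).

// Illustrative sketch - not part of the diff.
NodeRef sourceNodeRef = createSource(ADMIN, "quick.pdf");
TransformDefinition textExtract = new TransformDefinition(
        MimetypeMap.MIMETYPE_TEXT_PLAIN,           // text/plain target -> treated as a text extract
        java.util.Collections.emptyMap(),          // no transform options
        "clientData", "org.test.queue", "requestId");

renditionService2.setThumbnailsEnabled(false);     // disable thumbnail generation only
try
{
    AuthenticationUtil.runAs(() -> {
        transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
            renditionService2.transform(sourceNodeRef, textExtract);   // must not be rejected
            return null;
        });
        return null;
    }, ADMIN);
}
finally
{
    renditionService2.setThumbnailsEnabled(true);  // restore the default for other tests
}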
Binary file not shown.