Compare commits

...

47 Commits

Author SHA1 Message Date
alfresco-build
187646895c [maven-release-plugin][skip ci] prepare release 23.6.0.21 2025-09-22 12:43:22 +00:00
Tiago Salvado
f9515e336f [ACS-10166] Include qname and namespace in NodeIdsWhichReferenceContentUrl query (#3579) (#3582) 2025-09-22 12:57:36 +01:00
alfresco-build
828dd20576 [maven-release-plugin][skip ci] prepare for next development iteration 2025-09-22 09:21:07 +00:00
alfresco-build
3372e20c35 [maven-release-plugin][skip ci] prepare release 23.6.0.20 2025-09-22 09:21:05 +00:00
Gerard Olenski
64b5cace27 ACS-10159 Bump ATS (#3581) 2025-09-22 10:12:37 +02:00
alfresco-build
83acf26cf4 [maven-release-plugin][skip ci] prepare for next development iteration 2025-09-18 11:13:01 +00:00
alfresco-build
b3be0f2b7f [maven-release-plugin][skip ci] prepare release 23.6.0.19 2025-09-18 11:12:59 +00:00
Belal Ansari
7a6ebb9a05 Backporting MNT-25216 Error on fixedAclUpdaterJobDetail execution when using Oracle to ACS 23 (#3575) 2025-09-18 15:57:15 +05:30
alfresco-build
fa0f239618 [maven-release-plugin][skip ci] prepare for next development iteration 2025-09-17 01:05:50 +00:00
alfresco-build
43799408a8 [maven-release-plugin][skip ci] prepare release 23.6.0.18 2025-09-17 01:05:48 +00:00
Tiago Salvado
e7305006f0 [ACS-9929] Bump cxf version to 4.1.2 (#3572) 2025-09-17 01:21:54 +01:00
alfresco-build
40c30411af [maven-release-plugin][skip ci] prepare for next development iteration 2025-09-12 11:02:12 +00:00
alfresco-build
91f8b43237 [maven-release-plugin][skip ci] prepare release 23.6.0.17 2025-09-12 11:02:09 +00:00
Eva Vasques
6fccf828e1 ACS-10042 - AGS Concurrent IPR Group Creation (#3566) (#3570) 2025-09-12 11:10:43 +01:00
alfresco-build
3fac3373c9 [maven-release-plugin][skip ci] prepare for next development iteration 2025-09-04 08:24:19 +00:00
alfresco-build
ee857ce1de [maven-release-plugin][skip ci] prepare release 23.6.0.16 2025-09-04 08:24:17 +00:00
Debjit Chattopadhyay
483d7fab21 Merge pull request #3563 from Alfresco/fix/MNT-24776-backport-23.N
backport MNT-24776 to 23.N
2025-09-04 13:01:59 +05:30
Debjit Chattopadhyay
590209b299 Fix for "MNT-24776 : Category Picker Error when a User does not have Read Permissions to a Category"
(cherry picked from commit 626640ddc7)
2025-09-03 15:14:48 +05:30
alfresco-build
376514df67 [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-29 13:30:01 +00:00
alfresco-build
7144a2dd94 [maven-release-plugin][skip ci] prepare release 23.6.0.15 2025-08-29 13:29:59 +00:00
Debjit Chattopadhyay
b4da3d8c20 MNT-24308 - fix search filterquery creation for displaying favourites in Share for both Solr and Elasticsearch
MNT-24308 : On click of 'My Favorites' link under 'Documents' in the left panel of 'Document Library' page, all the links appear grayed out
2025-08-29 18:15:59 +05:30
Debjit Chattopadhyay
62de9ff0c0 MNT-24308 : On click of 'My Favorites' link under 'Documents' in the left panel of 'Document Library' page, all the links appear grayed out 2025-08-29 16:25:23 +05:30
alfresco-build
a11acce720 [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-28 13:37:40 +00:00
alfresco-build
1128011e15 [maven-release-plugin][skip ci] prepare release 23.6.0.14 2025-08-28 13:37:38 +00:00
Tiago Salvado
d0cb45de0d [MNT-25242] Add property to control if scope is cleaned (#3520) (#3550) 2025-08-28 11:18:05 +01:00
alfresco-build
2b48195896 [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-28 10:12:53 +00:00
alfresco-build
fbb95d6a7f [maven-release-plugin][skip ci] prepare release 23.6.0.13 2025-08-28 10:12:51 +00:00
cezary-witkowski
502427e852 [ACS-9933] Use only lang3 3.18 in 23.N (#3547)
Signed-off-by: cezary-witkowski <cezary.witkowski@hyland.com>
2025-08-28 11:31:18 +02:00
alfresco-build
3ff2d79641 [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-22 11:54:40 +00:00
alfresco-build
f274b88ece [maven-release-plugin][skip ci] prepare release 23.6.0.12 2025-08-22 11:54:38 +00:00
SatyamSah5
21550ec30b ACS-9991 Allow Content and Metadata extract even if thumbnails are disabled (#3537) 2025-08-22 16:39:46 +05:30
alfresco-build
8665267225 [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-07 20:47:40 +00:00
alfresco-build
984b0bc719 [maven-release-plugin][skip ci] prepare release 23.6.0.11 2025-08-07 20:47:38 +00:00
Eva Vasques
5b89fc0be7 MNT-24975 - Repeated IPR groups due to casing inconsistencies on creation (#3510) 2025-08-07 21:04:26 +01:00
alfresco-build
bf3a3382fd [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-07 13:51:45 +00:00
alfresco-build
14d007fae8 [maven-release-plugin][skip ci] prepare release 23.6.0.10 2025-08-07 13:51:43 +00:00
Eva Vasques
79317ddc9d ACS-9923 Removing an aspect needs to invoke onUpdateProperties (#3504) (#3506) 2025-08-07 13:51:29 +01:00
alfresco-build
c0e762fe5e [maven-release-plugin][skip ci] prepare for next development iteration 2025-08-06 06:04:44 +00:00
alfresco-build
5109b99520 [maven-release-plugin][skip ci] prepare release 23.6.0.9 2025-08-06 06:04:42 +00:00
Belal Ansari
dfc6306331 Feature/ACS 9933 vulnerability in commons lang3 (#3503) 2025-08-06 10:53:05 +05:30
alfresco-build
731f98921f [maven-release-plugin][skip ci] prepare for next development iteration 2025-07-30 13:35:32 +00:00
alfresco-build
0b21dbdc0a [maven-release-plugin][skip ci] prepare release 23.6.0.8 2025-07-30 13:35:29 +00:00
Eva Vasques
dd928356b8 MNT-24975 - Repeated IPR groups due to casing inconsistencies (#3459) (#3495) 2025-07-30 13:46:19 +01:00
alfresco-build
1844d8bdb9 [maven-release-plugin][skip ci] prepare for next development iteration 2025-07-23 08:47:11 +00:00
alfresco-build
17eef66f5c [maven-release-plugin][skip ci] prepare release 23.6.0.7 2025-07-23 08:47:08 +00:00
jakubkochman
1d1f269a70 PRODSEC-10304 bumped spring.version to 6.2.8 in 23.N (#3463) 2025-07-23 09:54:15 +02:00
alfresco-build
2ccf6044b8 [maven-release-plugin][skip ci] prepare for next development iteration 2025-07-21 04:28:24 +00:00
56 changed files with 4348 additions and 3680 deletions

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<build>

@@ -45,7 +45,7 @@ import com.github.dockerjava.netty.NettyDockerCmdExecFactory;
import lombok.Getter;
import lombok.Setter;
import org.alfresco.utility.Utility;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang3.SystemUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

@@ -42,7 +42,7 @@ import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.rest.v0.RecordCategoriesAPI;
import org.alfresco.test.AlfrescoTest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;

@@ -44,7 +44,7 @@ import org.alfresco.rest.v0.service.DispositionScheduleService;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.model.RepoTestModel;
import org.alfresco.utility.model.UserModel;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>

@@ -119,6 +119,11 @@ rm.patch.v35.holdNewChildAssocPatch.batchSize=1000
rm.haspermissionmap.read=Read
rm.haspermissionmap.write=WriteProperties,AddChildren,ReadContent
# Extended Permissions
# Enable matching the given username with the correct casing username when retrieving an IPR group.
# Only needs to be used if there are owners that don't have the username in the correct casing.
rm.extendedSecurity.enableUsernameNormalization=false
#
# Extended auto-version behaviour. If true and other auto-version properties are satisfied, then
# a document will be auto-versioned when its type is changed.
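A short usage note: the toggle introduced above ships disabled. Assuming the standard alfresco-global.properties override mechanism (an assumption; only the default is set in the file above), a deployment that has IPR groups created from inconsistently cased usernames could opt in with:

    rm.extendedSecurity.enableUsernameNormalization=true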

@@ -15,6 +15,13 @@
<parameter property="end" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>
<parameterMap id="parameter_NodeIdsWhichReferenceContentUrl" type="map">
<parameter property="contentUrlShort" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="contentUrlCrc" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="localName" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="uri" jdbcType="VARCHAR" javaType="java.lang.String"/>
</parameterMap>
<resultMap id="result_NodeRefEntity" type="org.alfresco.module.org_alfresco_module_rm.query.NodeRefEntity">
<result property="row" column="row" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="protocol" column="protocol" jdbcType="VARCHAR" javaType="java.lang.String"/>
@@ -55,18 +62,21 @@
<!-- Get list of node ids which reference given content url -->
<select id="select_NodeIdsWhichReferenceContentUrl"
parameterType="ContentUrl"
parameterMap="parameter_NodeIdsWhichReferenceContentUrl"
resultMap="result_NodeIds">
select
p.node_id
from
alf_content_url cu
LEFT OUTER JOIN alf_content_data cd ON (cd.content_url_id = cu.id)
LEFT OUTER JOIN alf_node_properties p ON (p.long_value = cd.id)
WHERE
content_url_short = #{contentUrlShort} and
content_url_crc = #{contentUrlCrc}
left outer join alf_content_data cd ON (cd.content_url_id = cu.id)
left outer join alf_node_properties p ON (p.long_value = cd.id)
left outer join alf_qname q ON (q.id = p.qname_id)
left outer join alf_namespace n ON (n.id = q.ns_id)
where
cu.content_url_short = ? and
cu.content_url_crc = ? and
q.local_name = ? and
n.uri = ?
</select>
<select id="select_RecordFoldersWithSchedules"

@@ -611,6 +611,7 @@
<property name="authorityService" ref="authorityService"/>
<property name="permissionService" ref="permissionService"/>
<property name="transactionService" ref="transactionService"/>
<property name="enableUsernameNormalization" value="${rm.extendedSecurity.enableUsernameNormalization}" />
</bean>
<bean id="ExtendedSecurityService" class="org.springframework.aop.framework.ProxyFactoryBean">

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>

@@ -27,7 +27,6 @@
package org.alfresco.module.org_alfresco_module_rm.query;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,6 +35,11 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mybatis.spring.SqlSessionTemplate;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.node.NodeDAO;
@@ -47,9 +51,6 @@ import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mybatis.spring.SqlSessionTemplate;
/**
* Records management query DAO implementation
@@ -89,7 +90,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
protected TenantService tenantService;
/**
* @param sqlSessionTemplate SQL session template
* @param sqlSessionTemplate
* SQL session template
*/
public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate)
{
@@ -97,7 +99,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
}
/**
* @param qnameDAO qname DAO
* @param qnameDAO
* qname DAO
*/
public final void setQnameDAO(QNameDAO qnameDAO)
{
@@ -173,8 +176,9 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
/**
* Get a set of node reference which reference the provided content URL
*
* @param String contentUrl content URL
* @return Set<NodeRef> set of nodes that reference the provided content URL
* @param String
* contentUrl content URL
* @return Set<NodeRef> set of nodes that reference the provided content URL
*/
@Override
public Set<NodeRef> getNodeRefsWhichReferenceContentUrl(String contentUrl)
@@ -188,13 +192,19 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl.toLowerCase());
Map<String, Object> params = new HashMap<>(4);
params.put("contentUrlShort", contentUrlEntity.getContentUrlShort());
params.put("contentUrlCrc", contentUrlEntity.getContentUrlCrc());
params.put("localName", ContentModel.PROP_CONTENT.getLocalName());
params.put("uri", ContentModel.PROP_CONTENT.getNamespaceURI());
if (logger.isDebugEnabled())
{
logger.debug("Executing query " + SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL);
}
// Get all the node ids which reference the given content url
List<Long> nodeIds = template.selectList(SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL, contentUrlEntity);
List<Long> nodeIds = template.selectList(SELECT_NODE_IDS_WHICH_REFERENCE_CONTENT_URL, params);
if (logger.isDebugEnabled())
{
@@ -224,7 +234,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
if (logger.isDebugEnabled())
{
logMessage.append(nodeRefToAdd)
.append(" (from version)");
.append(" (from version)");
}
}
@@ -232,7 +242,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
else
{
nodeRefToAdd = nodeDAO.getNodeIdStatus(nodeId)
.getNodeRef();
.getNodeRef();
if (logger.isDebugEnabled())
{
logMessage.append(nodeRefToAdd);
@@ -266,9 +276,9 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
{
Map<String, Object> params = new HashMap<>(2);
params.put("processed", qnameDAO.getQName(ASPECT_DISPOSITION_PROCESSED)
.getFirst());
.getFirst());
params.put("folderQnameId", qnameDAO.getQName(TYPE_RECORD_FOLDER)
.getFirst());
.getFirst());
params.put("start", start);
params.put("end", end);
@@ -280,7 +290,7 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
for (NodeRefEntity nodeRefEntity : entities)
{
results.add(
new NodeRef(nodeRefEntity.getProtocol(), nodeRefEntity.getIdentifier(), nodeRefEntity.getUuid()));
new NodeRef(nodeRefEntity.getProtocol(), nodeRefEntity.getIdentifier(), nodeRefEntity.getUuid()));
}
return results;
@@ -289,7 +299,8 @@ public class RecordsManagementQueryDAOImpl implements RecordsManagementQueryDAO,
/**
* @see org.alfresco.module.org_alfresco_module_rm.query.RecordsManagementQueryDAO#getPropertyStringValueEntity(String stringValue)
*/
public PropertyStringValueEntity getPropertyStringValueEntity(String stringValue){
public PropertyStringValueEntity getPropertyStringValueEntity(String stringValue)
{
PropertyStringValueEntity propertyStringValueEntity = new PropertyStringValueEntity();
propertyStringValueEntity.setValue(stringValue);

@@ -34,6 +34,12 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.webscripts.ui.common.StringUtils;
import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -42,7 +48,10 @@ import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService;
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.security.authority.RMAuthority;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -54,12 +63,6 @@ import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair;
import org.alfresco.util.ParameterCheck;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.extensions.webscripts.ui.common.StringUtils;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
/**
* Extended security service implementation.
@@ -68,9 +71,9 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
* @since 2.1
*/
public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
implements ExtendedSecurityService,
RecordsManagementModel,
ApplicationListener<ContextRefreshedEvent>
implements ExtendedSecurityService,
RecordsManagementModel,
ApplicationListener<ContextRefreshedEvent>
{
/** ipr group names */
static final String ROOT_IPR_GROUP = "INPLACE_RECORD_MANAGEMENT";
@@ -95,8 +98,11 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/** transaction service */
private TransactionService transactionService;
private boolean enableUsernameNormalization;
/**
* @param filePlanService file plan service
* @param filePlanService
* file plan service
*/
public void setFilePlanService(FilePlanService filePlanService)
{
@@ -104,7 +110,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
/**
* @param filePlanRoleService file plan role service
* @param filePlanRoleService
* file plan role service
*/
public void setFilePlanRoleService(FilePlanRoleService filePlanRoleService)
{
@@ -112,7 +119,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
/**
* @param authorityService authority service
* @param authorityService
* authority service
*/
public void setAuthorityService(AuthorityService authorityService)
{
@@ -120,7 +128,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
/**
* @param permissionService permission service
* @param permissionService
* permission service
*/
public void setPermissionService(PermissionService permissionService)
{
@@ -128,13 +137,23 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
/**
* @param transactionService transaction service
* @param transactionService
* transaction service
*/
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
/**
* @param enableUsernameNormalization
* enable username normalization to ensure correct casing
*/
public void setEnableUsernameNormalization(boolean enableUsernameNormalization)
{
this.enableUsernameNormalization = enableUsernameNormalization;
}
/**
* Application context refresh event handler
*/
@@ -142,19 +161,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent)
{
// run as System on bootstrap
AuthenticationUtil.runAs(new RunAsWork<Object>()
{
AuthenticationUtil.runAs(new RunAsWork<Object>() {
public Object doWork()
{
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>()
{
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>() {
public Void execute()
{
// if the root group doesn't exist then create it
if (!authorityService.authorityExists(getRootIRPGroup()))
{
authorityService.createAuthority(AuthorityType.GROUP, ROOT_IPR_GROUP, ROOT_IPR_GROUP,
Collections.singleton(RMAuthority.ZONE_APP_RM));
Collections.singleton(RMAuthority.ZONE_APP_RM));
}
return null;
}
@@ -174,7 +191,7 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
return GROUP_PREFIX + ROOT_IPR_GROUP;
}
/**
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService#hasExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override
@@ -224,8 +241,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Helper to get authorities for a given group
*
* @param group group name
* @return Set<String> immediate authorities
* @param group
* group name
* @return Set<String> immediate authorities
*/
private Set<String> getAuthorities(String group)
{
@@ -284,8 +302,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
* <p>
* Return null if none found.
*
* @param nodeRef node reference
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
* @param nodeRef
* node reference
* @return Pair<String, String> where first is the read group and second if the write group, null if none found
*/
private Pair<String, String> getIPRGroups(NodeRef nodeRef)
{
@@ -321,17 +340,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Given a set of readers and writers find or create the appropriate IPR groups.
* <p>
* The IPR groups are named with hashes of the authority lists in order to reduce
* the set of groups that require exact match. A further index is used to handle
* a situation where there is a hash clash, but a difference in the authority lists.
* The IPR groups are named with hashes of the authority lists in order to reduce the set of groups that require exact match. A further index is used to handle a situation where there is a hash clash, but a difference in the authority lists.
* <p>
* When no match is found the groups are created. Once created
* When no match is found the groups are created. Once created
*
* @param filePlan file plan
* @param readers authorities with read
* @param writers authorities with write
* @return Pair<String, String> where first is the full name of the read group and
* second is the full name of the write group
* @param filePlan
* file plan
* @param readers
* authorities with read
* @param writers
* authorities with write
* @return Pair<String, String> where first is the full name of the read group and second is the full name of the write group
*/
private Pair<String, String> createOrFindIPRGroups(Set<String> readers, Set<String> writers)
{
@@ -343,20 +362,28 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Create or find an IPR group based on the provided prefix and authorities.
*
* @param groupPrefix group prefix
* @param authorities authorities
* @return String full group name
* @param groupPrefix
* group prefix
* @param authorities
* authorities
* @return String full group name
*/
private String createOrFindIPRGroup(String groupPrefix, Set<String> authorities)
{
String group = null;
// If enabled, the authorities are forced to match the correct casing of the usernames in case they were set
// with the incorrect casing.
// If not, it will just use the authorities as they are.
// In normal circumstances, the authorities are in the correct casing, so this is disabled by default.
Set<String> authoritySet = normalizeAuthorities(authorities);
// find group or determine what the next index is if no group exists or there is a clash
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authorities);
Pair<String, Integer> groupResult = findIPRGroup(groupPrefix, authoritySet);
if (groupResult.getFirst() == null)
{
group = createIPRGroup(groupPrefix, authorities, groupResult.getSecond());
group = createIPRGroup(groupPrefix, authoritySet, groupResult.getSecond());
}
else
{
@@ -369,13 +396,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Given a group name prefix and the authorities, finds the exact match existing group.
* <p>
* If the group does not exist then the group returned is null and the index shows the next available
* group index for creation.
* If the group does not exist then the group returned is null and the index shows the next available group index for creation.
*
* @param groupPrefix group name prefix
* @param authorities authorities
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second
* if the next available create index
* @param groupPrefix
* group name prefix
* @param authorities
* authorities
* @return Pair<String, Integer> where first is the name of the found group, null if none found and second if the next available create index
*/
private Pair<String, Integer> findIPRGroup(String groupPrefix, Set<String> authorities)
{
@@ -391,12 +418,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
while (hasMoreItems == true)
{
// get matching authorities
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP,
RMAuthority.ZONE_APP_RM,
groupShortNamePrefix,
false,
false,
new PagingRequest(MAX_ITEMS*pageCount, MAX_ITEMS));
PagingResults<String> results = authorityService.getAuthorities(
AuthorityType.GROUP,
RMAuthority.ZONE_APP_RM,
groupShortNamePrefix,
false,
false,
new PagingRequest(MAX_ITEMS * pageCount, MAX_ITEMS));
// record the total count
nextGroupIndex = nextGroupIndex + results.getPage().size();
@@ -413,29 +441,88 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
// determine if there are any more pages to inspect
hasMoreItems = results.hasMoreItems();
pageCount ++;
pageCount++;
}
return new Pair<>(iprGroup, nextGroupIndex);
}
/**
* Given a set of authorities, normalizes the authority names to ensure correct casing.
*
* @param authNames
* @return
*/
private Set<String> normalizeAuthorities(Set<String> authNames)
{
// If disabled or no authorities, return as is
if (!enableUsernameNormalization || authNames == null || authNames.isEmpty())
{
return authNames;
}
Set<String> normalizedAuthorities = new HashSet<>();
for (String authorityName : authNames)
{
normalizedAuthorities.add(normalizeAuthorityName(authorityName));
}
return normalizedAuthorities;
}
/**
* Usernames are case insensitive but affect the IPR group matching when set with different casing. For a given authority of type user, this method normalizes the authority name. If group, it returns the name as-is.
*
* @param authorityName
* the authority name to normalize
* @return the normalized authority name
*/
private String normalizeAuthorityName(String authorityName)
{
if (authorityName == null || authorityName.startsWith(GROUP_PREFIX))
{
return authorityName;
}
// For users, attempt to get the correct casing from the username property of the user node
if (authorityService.authorityExists(authorityName))
{
try
{
NodeRef authorityNodeRef = authorityService.getAuthorityNodeRef(authorityName);
if (authorityNodeRef != null)
{
String username = (String) nodeService.getProperty(authorityNodeRef, ContentModel.PROP_USERNAME);
return username != null ? username : authorityName;
}
}
catch (Exception e)
{
// If anything goes wrong, fallback to the original name
}
}
return authorityName;
}
/**
* Determines whether a group exactly matches a list of authorities.
*
* @param authorities list of authorities
* @param group group
* @param authorities
* list of authorities
* @param group
* group
* @return
*/
private boolean isIPRGroupTrueMatch(String group, Set<String> authorities)
{
//Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
// Remove GROUP_EVERYONE for proper comparison as GROUP_EVERYONE is never included in an IPR group
Set<String> plainAuthorities = new HashSet<String>();
if (authorities != null)
{
plainAuthorities.addAll(authorities);
plainAuthorities.remove(PermissionService.ALL_AUTHORITIES);
}
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
Set<String> contained = authorityService.getContainedAuthorities(null, group, true);
return contained.equals(plainAuthorities);
}
@@ -444,15 +531,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
* <p>
* 'package' scope to help testing.
*
* @param prefix prefix
* @param authorities authorities
* @return String group prefix short name
* @param prefix
* prefix
* @param authorities
* authorities
* @return String group prefix short name
*/
/*package*/ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
/* package */ String getIPRGroupPrefixShortName(String prefix, Set<String> authorities)
{
StringBuilder builder = new StringBuilder(128)
.append(prefix)
.append(getAuthoritySetHashCode(authorities));
.append(prefix)
.append(getAuthoritySetHashCode(authorities));
return builder.toString();
}
@@ -464,13 +553,17 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
* <p>
* 'package' scope to help testing.
*
* @param prefix prefix
* @param readers read authorities
* @param writers write authorities
* @param index group index
* @return String group short name
* @param prefix
* prefix
* @param readers
* read authorities
* @param writers
* write authorities
* @param index
* group index
* @return String group short name
*/
/*package*/ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
/* package */ String getIPRGroupShortName(String prefix, Set<String> authorities, int index)
{
return getIPRGroupShortName(prefix, authorities, Integer.toString(index));
}
@@ -480,17 +573,21 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
* <p>
* Note this excludes the "GROUP_" prefix.
*
* @param prefix prefix
* @param readers read authorities
* @param writers write authorities
* @param index group index
* @return String group short name
* @param prefix
* prefix
* @param readers
* read authorities
* @param writers
* write authorities
* @param index
* group index
* @return String group short name
*/
private String getIPRGroupShortName(String prefix, Set<String> authorities, String index)
{
StringBuilder builder = new StringBuilder(128)
.append(getIPRGroupPrefixShortName(prefix, authorities))
.append(index);
.append(getIPRGroupPrefixShortName(prefix, authorities))
.append(index);
return builder.toString();
}
@@ -498,8 +595,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Gets the hashcode value of a set of authorities.
*
* @param authorities set of authorities
* @return int hash code
* @param authorities
* set of authorities
* @return int hash code
*/
private int getAuthoritySetHashCode(Set<String> authorities)
{
@@ -514,10 +612,13 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Creates a new IPR group.
*
* @param groupNamePrefix group name prefix
* @param children child authorities
* @param index group index
* @return String full name of created group
* @param groupNamePrefix
* group name prefix
* @param children
* child authorities
* @param index
* group index
* @return String full name of created group
*/
private String createIPRGroup(String groupNamePrefix, Set<String> children, int index)
{
@@ -547,10 +648,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
}
}
}
catch(DuplicateChildNodeNameException ex)
catch (DuplicateChildNodeNameException ex)
{
// the group was concurrently created
group = authorityService.getName(AuthorityType.GROUP, groupShortName);
// Rethrow as ConcurrencyFailureException so that is can be retried and linked to the group created by the concurrent transaction
throw new ConcurrencyFailureException("IPR group creation failed due to concurrent duplicate group name creation: " + groupShortName);
}
return group;
@@ -559,8 +660,10 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Assign IPR groups to a node reference with the correct permissions.
*
* @param iprGroups iprGroups, first read and second write
* @param nodeRef node reference
* @param iprGroups
* iprGroups, first read and second write
* @param nodeRef
* node reference
*/
private void assignIPRGroupsToNode(Pair<String, String> iprGroups, NodeRef nodeRef)
{
@@ -598,7 +701,8 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* Clear the nodes IPR permissions
*
* @param nodeRef node reference
* @param nodeRef
* node reference
*/
private void clearPermissions(NodeRef nodeRef, Pair<String, String> iprGroups)
{
@@ -610,7 +714,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedReaders(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override @Deprecated public Set<String> getExtendedReaders(NodeRef nodeRef)
@Override
@Deprecated
public Set<String> getExtendedReaders(NodeRef nodeRef)
{
return getReaders(nodeRef);
}
@@ -618,7 +724,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#getExtendedWriters(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override @Deprecated public Set<String> getExtendedWriters(NodeRef nodeRef)
@Override
@Deprecated
public Set<String> getExtendedWriters(NodeRef nodeRef)
{
return getWriters(nodeRef);
}
@@ -626,7 +734,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
*/
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
@Override
@Deprecated
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
{
set(nodeRef, readers, writers);
}
@@ -634,7 +744,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#addExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
*/
@Override @Deprecated public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
@Override
@Deprecated
public void addExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
{
set(nodeRef, readers, writers);
}
@@ -642,7 +754,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef)
@Override
@Deprecated
public void removeAllExtendedSecurity(NodeRef nodeRef)
{
remove(nodeRef);
}
@@ -650,7 +764,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set)
*/
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
@Override
@Deprecated
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers)
{
remove(nodeRef);
}
@@ -658,7 +774,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, java.util.Set, java.util.Set, boolean)
*/
@Override @Deprecated public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String>writers, boolean applyToParents)
@Override
@Deprecated
public void removeExtendedSecurity(NodeRef nodeRef, Set<String> readers, Set<String> writers, boolean applyToParents)
{
remove(nodeRef);
}
@@ -666,7 +784,9 @@ public class ExtendedSecurityServiceImpl extends ServiceBaseImpl
/**
* @see org.alfresco.module.org_alfresco_module_rm.security.DeprecatedExtendedSecurityService#removeAllExtendedSecurity(org.alfresco.service.cmr.repository.NodeRef, boolean)
*/
@Override @Deprecated public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
@Override
@Deprecated
public void removeAllExtendedSecurity(NodeRef nodeRef, boolean applyToParents)
{
remove(nodeRef);
}

@@ -29,14 +29,23 @@ package org.alfresco.module.org_alfresco_module_rm.test.legacy.service;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.junit.Assert;
import org.springframework.dao.ConcurrencyFailureException;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.site.SiteModel;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.cmr.site.SiteVisibility;
import org.alfresco.util.GUID;
@@ -73,8 +82,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
private String createTestUser()
{
return doTestInTransaction(new Test<String>()
{
return doTestInTransaction(new Test<String>() {
public String run()
{
String userName = GUID.generate();
@@ -90,8 +98,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
final String elephant = createTestUser();
final String snake = createTestUser();
doTestInTransaction(new Test<Void>()
{
doTestInTransaction(new Test<Void>() {
public Void run()
{
assertFalse(extendedSecurityService.hasExtendedSecurity(filePlan));
@@ -118,7 +125,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
// test remove
extendedSecurityService.remove(recordToo);
assertFalse(extendedSecurityService.hasExtendedSecurity(recordToo));
assertTrue(extendedSecurityService.getReaders(recordToo).isEmpty());
assertTrue(extendedSecurityService.getWriters(recordToo).isEmpty());
@@ -133,8 +140,7 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
final String monkey = createTestUser();
final String elephant = createTestUser();
doTestInTransaction(new Test<Void>()
{
doTestInTransaction(new Test<Void>() {
Set<String> extendedReaders = new HashSet<>(2);
public Void run() throws Exception
@@ -184,112 +190,337 @@ public class ExtendedSecurityServiceImplTest extends BaseRMTestCase
public void testDifferentUsersDifferentPermissions()
{
final String userNone = createTestUser();
final String userRead = createTestUser();
final String userWrite = createTestUser();
final String siteShortName = GUID.generate();
final String userNone = createTestUser();
final String userRead = createTestUser();
final String userWrite = createTestUser();
final String siteShortName = GUID.generate();
doTestInTransaction(new Test<Void>()
{
doTestInTransaction(new Test<Void>() {
public Void run() throws Exception
{
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
return null;
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
return null;
}
});
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>()
{
final NodeRef documentLibrary = doTestInTransaction(new Test<NodeRef>() {
public NodeRef run() throws Exception
{
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
siteService.setMembership(siteShortName, userRead, SiteModel.SITE_CONSUMER);
siteService.setMembership(siteShortName, userWrite, SiteModel.SITE_COLLABORATOR);
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
}
});
final NodeRef record = doTestInTransaction(new Test<NodeRef>()
{
final NodeRef record = doTestInTransaction(new Test<NodeRef>() {
public NodeRef run() throws Exception
{
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
recordService.createRecord(filePlan, record);
return record;
NodeRef record = fileFolderService.create(documentLibrary, GUID.generate(), ContentModel.TYPE_CONTENT)
.getNodeRef();
recordService.createRecord(filePlan, record);
return record;
}
});
doTestInTransaction(new Test<Void>()
{
doTestInTransaction(new Test<Void>() {
public Void run() throws Exception
{
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userNone);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userNone);
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userRead);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userRead);
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
return null;
}
}, userWrite);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
return null;
}
}, userWrite);
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userNone);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userNone);
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userRead);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(record, FILING));
return null;
}
}, userRead);
AuthenticationUtil.runAs(new RunAsWork<Void>()
{
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
return null;
}
}, userWrite);
AuthenticationUtil.runAs(new RunAsWork<Void>() {
public Void doWork() throws Exception
{
// check permissions
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, READ_RECORDS));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(record, FILING));
return null;
}
}, userWrite);
return null;
return null;
}
});
}
public void testConcurrentSetWithRetry()
{
Set<String> extendedReaders = new HashSet<>(2);
Set<String> extendedWriters = new HashSet<>(2);
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
// For each record created previously, spawn a thread to set extended security so we cause concurrency
// failure trying to create IPR groups with the same name
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, true);
// Look for duplicated IPR groups and verify all documents have the same groups assigned
verifyCreatedGroups(documents, false);
AuthenticationUtil.clearCurrentSecurityContext();
}
public void testConcurrentSetWithoutRetry()
{
Set<String> extendedReaders = new HashSet<>(2);
Set<String> extendedWriters = new HashSet<>(2);
Set<NodeRef> documents = setupConcurrentTestCase(10, extendedReaders, extendedWriters);
// For each record created previously, spawn a thread to set extended security so we cause concurrency
// failure trying to create IPR groups with the same name.
// Since there is no retry, we expect to get a ConcurrencyFailureException
Assert.assertThrows(ConcurrencyFailureException.class, () -> {
fireParallelExecutionOfSetExtendedSecurity(documents, extendedReaders, extendedWriters, false);
});
// Look for duplicated IPR groups and verify all documents have the same groups assigned
// Since there was a ConcurrencyFailureException some threads failed to set extended security so some
// documents may not have IPR groups created.
verifyCreatedGroups(documents, true);
AuthenticationUtil.clearCurrentSecurityContext();
}
private Set<NodeRef> setupConcurrentTestCase(int concurrentThreads, Set<String> extendedReaders, Set<String> extendedWriters)
{
final String usera = createTestUser();
final String userb = createTestUser();
final String owner = createTestUser();
extendedReaders.add(usera);
extendedReaders.add(userb);
extendedWriters.add(usera);
extendedWriters.add(userb);
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
// Create a site
NodeRef documentLib = createSite(new HashSet<>(), new HashSet<>());
// Create records in the site document library
return createRecords(concurrentThreads, documentLib, owner);
}
private NodeRef createSite(Set<String> readers, Set<String> writers)
{
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<NodeRef>() {
@Override
public NodeRef execute() throws Throwable
{
final String siteShortName = GUID.generate();
siteService.createSite(null, siteShortName, "test", "test", SiteVisibility.PRIVATE);
readers.forEach(reader -> siteService.setMembership(siteShortName, reader, SiteModel.SITE_CONSUMER));
writers.forEach(writer -> siteService.setMembership(siteShortName, writer, SiteModel.SITE_COLLABORATOR));
return siteService.createContainer(siteShortName, SiteService.DOCUMENT_LIBRARY, null, null);
}
}, false, true);
}
private Set<NodeRef> createRecords(int numRecords, NodeRef parent, String owner)
{
return retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Set<NodeRef>>() {
@Override
public Set<NodeRef> execute() throws Throwable
{
int createdRecords = 0;
Set<NodeRef> documents = new HashSet<>();
while (createdRecords < numRecords)
{
final NodeRef doc = fileFolderService.create(parent, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
ownableService.setOwner(doc, owner);
recordService.createRecord(filePlan, doc, rmFolder, true);
recordService.file(doc);
recordService.complete(doc);
documents.add(doc);
createdRecords++;
}
return documents;
}
}, false, true);
}
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers, boolean useRetry)
{
if (!useRetry)
{
setExtendedSecurity(doc, readers, writers);
return;
}
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable
{
setExtendedSecurity(doc, readers, writers);
return null;
}
}, false, true);
}
private void setExtendedSecurity(NodeRef doc, Set<String> readers, Set<String> writers)
{
AuthenticationUtil.setAdminUserAsFullyAuthenticatedUser();
extendedSecurityService.set(doc, readers, writers);
}
private void fireParallelExecutionOfSetExtendedSecurity(Set<NodeRef> documents, Set<String> extendedReaders, Set<String> extendedWriters, boolean useRetry)
{
CompletableFuture<?>[] futures = documents.stream()
.map(doc -> CompletableFuture.runAsync(() -> setExtendedSecurity(doc, extendedReaders, extendedWriters, useRetry)))
.toArray(CompletableFuture[]::new);
try
{
CompletableFuture.allOf(futures).join();
}
catch (Exception e)
{
Throwable cause = e.getCause();
if (cause instanceof ConcurrencyFailureException)
{
throw (ConcurrencyFailureException) cause;
}
throw new RuntimeException("Error during parallel execution", e);
}
}
private void verifyCreatedGroups(Set<NodeRef> documents, boolean onlyDuplicatesValidation)
{
retryingTransactionHelper.doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable
{
Set<String> expectedAuthorities = null;
Set<Set<String>> errors = new HashSet<>();
for (NodeRef doc : documents)
{
Set<AccessPermission> permissions = permissionService.getAllSetPermissions(doc);
Set<String> authorities = getDocumentAuthorities(permissions);
Set<String> authoritiesById = getAuthorityIds(authorities);
verifyIPRGroups(authorities, onlyDuplicatesValidation);
if (onlyDuplicatesValidation)
{
// Some documents may not have IPR groups created if there was a ConcurrencyFailureException
continue;
}
// All documents should have the same exact set of groups assigned
if (expectedAuthorities == null)
{
expectedAuthorities = authoritiesById;
}
if (!expectedAuthorities.equals(authoritiesById))
{
errors.add(authoritiesById);
}
}
assertTrue("Unexpected authorities linked to document", errors.isEmpty());
return null;
}
}, false, true);
}
private Set<String> getDocumentAuthorities(Set<AccessPermission> permissions)
{
Set<String> authorities = new HashSet<>();
for (AccessPermission accessPermission : permissions)
{
String authority = accessPermission.getAuthority();
String authName = authorityService.getName(AuthorityType.GROUP, authority);
authorities.add(authName);
}
return authorities;
}
private Set<String> getAuthorityIds(Set<String> authorities)
{
Set<String> authorityIds = new HashSet<>();
for (String authority : authorities)
{
String authId = authorityService.getAuthorityNodeRef(authority) != null
? authorityService.getAuthorityNodeRef(authority).getId()
: null;
authorityIds.add(authId);
}
return authorityIds;
}
private void verifyIPRGroups(Set<String> authorities, boolean onlyDuplicatesValidation)
{
boolean hasGroupIPR = false;
for (String authorityName : authorities)
{
String shortName = authorityService.getShortName(authorityName);
if (authorityName.startsWith("GROUP_IPR"))
{
hasGroupIPR = true;
PagingResults<String> results = authorityService.getAuthorities(AuthorityType.GROUP, null, shortName, false,
false, new PagingRequest(0, 10));
assertEquals("No duplicated IPR group expected", 1, results.getPage().size());
}
}
if (!onlyDuplicatesValidation)
{
assertTrue("No IPR Groups created", hasGroupIPR);
}
}
}

@@ -52,6 +52,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -67,6 +68,7 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthorityService;
@@ -522,6 +524,104 @@ public class ExtendedSecurityServiceImplUnitTest
verify(mockedPermissionService).setPermission(nodeRef, writeGroup, RMPermissionModel.FILING, true);
}
/**
* Given a node with no previous IPR groups assigned
* And having pre-existing IPR groups matching the ones we need
* When I add some read and write authorities but with a different casing
* Then the existing IPR groups are used
*/
@SuppressWarnings("unchecked")
@Test public void addExtendedSecurityWithMixedCasingUsernames()
{
// Have the usernames in the node as the correct usernames but with incorrect casing
String user1 = "UseR";
String user2 = "UseR_w";
// Incorrect IPR Group names
Set<String> diffCasingReaders = Stream.of(user1, GROUP).collect(Collectors.toSet());
Set<String> diffCasingWriters = Stream.of(user2, GROUP_W).collect(Collectors.toSet());
String wrongReadGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(READER_GROUP_PREFIX, diffCasingReaders);
String wrongWriteGroupPrefix = extendedSecurityService.getIPRGroupPrefixShortName(WRITER_GROUP_PREFIX, diffCasingWriters);
String wrongReadGroup = wrongReadGroupPrefix + "0";
String wrongWriteGroup = wrongWriteGroupPrefix + "0";
// Correct Group names
String correctReadGroup = readGroupPrefix + "0";
String correctWriteGroup = writeGroupPrefix + "0";
// If queried for the correct groups, return the results
PagingResults<String> mockedCorrectReadPResults = mock(PagingResults.class);
PagingResults<String> mockedCorrectWritePResults = mock(PagingResults.class);
when(mockedCorrectReadPResults.getPage())
.thenReturn(Stream.of(GROUP_PREFIX + correctReadGroup).collect(Collectors.toList()));
when(mockedAuthorityService.getAuthorities(
eq(AuthorityType.GROUP),
eq(RMAuthority.ZONE_APP_RM),
eq(readGroupPrefix),
eq(false),
eq(false),
any(PagingRequest.class)))
.thenReturn(mockedCorrectReadPResults);
when(mockedCorrectWritePResults.getPage())
.thenReturn(Stream.of(GROUP_PREFIX + correctWriteGroup).collect(Collectors.toList()));
when(mockedAuthorityService.getAuthorities(
eq(AuthorityType.GROUP),
eq(RMAuthority.ZONE_APP_RM),
eq(writeGroupPrefix),
eq(false),
eq(false),
any(PagingRequest.class)))
.thenReturn(mockedCorrectWritePResults);
// Don't return results for the incorrect groups (lenient as these may not be called with normalization enabled)
PagingResults<String> mockedWrongReadPResults = mock(PagingResults.class);
PagingResults<String> mockedWrongWritePResults = mock(PagingResults.class);
lenient().when(mockedWrongReadPResults.getPage())
.thenReturn(Collections.emptyList());
lenient().when(mockedAuthorityService.getAuthorities(
eq(AuthorityType.GROUP),
eq(RMAuthority.ZONE_APP_RM),
eq(wrongReadGroupPrefix),
eq(false),
eq(false),
any(PagingRequest.class)))
.thenReturn(mockedWrongReadPResults);
lenient().when(mockedWrongWritePResults.getPage())
.thenReturn(Collections.emptyList());
lenient().when(mockedAuthorityService.getAuthorities(
eq(AuthorityType.GROUP),
eq(RMAuthority.ZONE_APP_RM),
eq(wrongWriteGroupPrefix),
eq(false),
eq(false),
any(PagingRequest.class)))
.thenReturn(mockedWrongWritePResults);
// The users do exist, despite being in a different casing and are able to be retrieved
NodeRef noderefUser1 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER);
when(mockedAuthorityService.authorityExists(user1)).thenReturn(true);
when(mockedAuthorityService.getAuthorityNodeRef(user1)).thenReturn(noderefUser1);
when(mockedNodeService.getProperty(noderefUser1, ContentModel.PROP_USERNAME)).thenReturn(USER);
NodeRef noderefUser2 = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, USER_W);
when(mockedAuthorityService.authorityExists(user2)).thenReturn(true);
when(mockedAuthorityService.getAuthorityNodeRef(user2)).thenReturn(noderefUser2);
when(mockedNodeService.getProperty(noderefUser2, ContentModel.PROP_USERNAME)).thenReturn(USER_W);
// Set the extended security service to normalize usernames
extendedSecurityService.setEnableUsernameNormalization(true);
extendedSecurityService.set(nodeRef, diffCasingReaders, diffCasingWriters);
// Verify that the incorrect read group is not created
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongReadGroup, wrongReadGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
// Verify that the incorrect write group is not created
verify(mockedAuthorityService, never()).createAuthority(AuthorityType.GROUP, wrongWriteGroup, wrongWriteGroup, Collections.singleton(RMAuthority.ZONE_APP_RM));
}
/**
* Given a node with no previous IPR groups assigned
@@ -571,7 +671,7 @@ public class ExtendedSecurityServiceImplUnitTest
.thenReturn(Stream
.of(USER_W, AlfMock.generateText())
.collect(Collectors.toSet()));
// add extended security
extendedSecurityService.set(nodeRef, READERS, WRITERS);
@@ -895,7 +995,7 @@ public class ExtendedSecurityServiceImplUnitTest
// group names
String readGroup = extendedSecurityService.getIPRGroupShortName(READER_GROUP_FULL_PREFIX, READERS, 0);
String writeGroup = extendedSecurityService.getIPRGroupShortName(WRITER_GROUP_FULL_PREFIX, WRITERS, 0);
// setup renditions
NodeRef renditionNodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_RECORD))
@@ -904,7 +1004,7 @@ public class ExtendedSecurityServiceImplUnitTest
.thenReturn(renditionNodeRef);
when(mockedNodeService.getChildAssocs(nodeRef, RenditionModel.ASSOC_RENDITION, RegexQNamePattern.MATCH_ALL))
.thenReturn(Collections.singletonList(mockedChildAssociationRef));
// setup permissions
Set<AccessPermission> permissions = Stream
.of(new AccessPermissionImpl(AlfMock.generateText(), AccessStatus.ALLOWED, readGroup, 0),
@@ -913,17 +1013,17 @@ public class ExtendedSecurityServiceImplUnitTest
.collect(Collectors.toSet());
when(mockedPermissionService.getAllSetPermissions(nodeRef))
.thenReturn(permissions);
// remove extended security
extendedSecurityService.remove(nodeRef);
// verify that the groups permissions have been removed
verify(mockedPermissionService).clearPermission(nodeRef, readGroup);
verify(mockedPermissionService).clearPermission(nodeRef, writeGroup);
// verify that the groups permissions have been removed from the rendition
verify(mockedPermissionService).clearPermission(renditionNodeRef, readGroup);
verify(mockedPermissionService).clearPermission(renditionNodeRef, writeGroup);
}
}
}
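The new mixed-casing test relies on the service resolving a username's stored casing before IPR group names are computed. A minimal sketch of that lookup, assuming the AuthorityService/NodeService calls mocked above; the helper name and shape are illustrative, not the shipped implementation:

import java.io.Serializable;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.AuthorityService;

// Illustrative helper: resolve the canonical casing of a username so that
// "UseR" and "user" produce the same IPR reader/writer group names.
final class UsernameNormalizer
{
    static String normalize(AuthorityService authorityService, NodeService nodeService, String username)
    {
        if (!authorityService.authorityExists(username))
        {
            return username;
        }
        NodeRef authority = authorityService.getAuthorityNodeRef(username);
        if (authority == null)
        {
            return username;
        }
        // cm:userName stores the username with its original casing
        Serializable canonical = nodeService.getProperty(authority, ContentModel.PROP_USERNAME);
        return canonical != null ? canonical.toString() : username;
    }
}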


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<build>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>
@@ -51,8 +51,8 @@
</exclusions>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<scope>provided</scope>
</dependency>


@@ -30,7 +30,7 @@ import java.util.regex.Pattern;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.json.simple.JSONObject;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
@@ -92,7 +92,7 @@ public class WikiPageGet extends AbstractWikiWebScript
{
links.add(link);
// build the list of available pages
WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml(link));
WikiPageInfo wikiPage = wikiService.getWikiPage(site.getShortName(), StringEscapeUtils.unescapeHtml4(link));
if (wikiPage != null)
{
pageTitles.add(wikiPage.getTitle());
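The commons-lang to commons-lang3 migration renames the HTML entity unescape method from unescapeHtml to unescapeHtml4. A small before/after sketch, assuming only the standard org.apache.commons.lang3.StringEscapeUtils API; the sample string is invented:

import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapeHtmlExample
{
    public static void main(String[] args)
    {
        // commons-lang 2.x: StringEscapeUtils.unescapeHtml("&lt;b&gt;Main Page&lt;/b&gt;")
        // commons-lang3: unescapeHtml4 handles the HTML 4 entity set
        String title = StringEscapeUtils.unescapeHtml4("&lt;b&gt;Main Page&lt;/b&gt;");
        System.out.println(title); // prints <b>Main Page</b>
    }
}

commons-lang3 marks StringEscapeUtils as deprecated in favour of commons-text, but the class is still shipped, so this remains a drop-in rename for the wiki code.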


@@ -91,6 +91,15 @@ function doclist_getAllNodes(parsedArgs, filterParams, query, totalItemCount)
};
}
function sanitizeJunkFavouriteKeys(favourites){
for (var key in favourites) {
if (!key || key.trim() === "") {
delete favourites[key];
}
}
return favourites;
}
/**
* Main entry point: Create collection of documents and folders in the given space
*
@@ -123,6 +132,28 @@ function doclist_main()
if (logger.isLoggingEnabled())
logger.log("doclist.lib.js - NodeRef: " + parsedArgs.nodeRef + " Query: " + query);
favourites = sanitizeJunkFavouriteKeys(favourites);
if(Object.keys(favourites).length === 0 && query === null)
{
return {
luceneQuery: "",
paging: {
totalRecords: 0,
startIndex: 0
},
container: parsedArgs.rootNode,
parent: null,
onlineEditing: utils.moduleInstalled("org.alfresco.module.vti"),
itemCount: {
folders: 0,
documents: 0
},
items: [],
customJSON: slingshotDocLib.getJSON()
};
}
var totalItemCount = filterParams.limitResults ? parseInt(filterParams.limitResults, 10) : -1;
// For all sites documentLibrary query we pull in all available results and post filter


@@ -182,11 +182,14 @@ var Filters =
case "favourites":
for (var favourite in favourites)
{
if (filterQuery)
if (favourite && favourite.trim() !== "")
{
filterQuery += " OR ";
if (filterQuery)
{
filterQuery += " OR ";
}
filterQuery += "ID:\"" + favourite + "\"";
}
filterQuery += "ID:\"" + favourite + "\"";
}
if (filterQuery.length !== 0)
@@ -201,7 +204,13 @@ var Filters =
else
{
// empty favourites query
filterQuery = "+ID:\"\"";
logger.warn("No favourites found for user: " + person.properties.userName);
return {
query: null,
limitResults: 0,
sort: [],
language: "lucene"
};
}
filterParams.query = filterQuery;


@@ -45,7 +45,7 @@ import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.alfresco.service.cmr.wiki.WikiService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyMap;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;
@@ -996,7 +996,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
String link = m.group(1);
link += "?title=<script>alert('xss');</script>";
WikiPageInfo wikiPage2 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, link);
WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml(link));
WikiPageInfo wikiPage1 = this.wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, StringEscapeUtils.unescapeHtml4(link));
assertEquals(wikiPage2, wikiPage1);
}
@@ -1006,4 +1006,4 @@ public class WikiRestApiTest extends BaseWebScriptTest
this.wikiService.deleteWikiPage(wikiPageNew);
}
}
}
}


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<dependencies>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<dependencies>


@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
</project>


@@ -36,8 +36,7 @@ commons-email http://jakarta.apache.org/commons/
commons-fileupload http://jakarta.apache.org/commons/
commons-httpclient http://jakarta.apache.org/commons/
commons-io http://jakarta.apache.org/commons/
commons-jxpath http://jakarta.apache.org/commons/
commons-lang http://jakarta.apache.org/commons/
commons-jxpath http://jakarta.apache.org/commons/
commons-lang3 http://jakarta.apache.org/commons/
commons-logging http://jakarta.apache.org/commons/
commons-net http://jakarta.apache.org/commons/


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>


@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<modules>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<organization>


@@ -16,7 +16,7 @@ import org.alfresco.utility.testrail.annotation.TestRail;
import org.apache.chemistry.opencmis.commons.exceptions.CmisObjectNotFoundException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisPermissionDeniedException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisUnauthorizedException;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<developers>


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<developers>


@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>
@@ -17,7 +17,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<rest.api.explorer.branch>master</rest.api.explorer.branch>
<httpclient-osgi-version>4.5.6</httpclient-osgi-version>
<commons-lang3.version>3.17.0</commons-lang3.version>
<commons-lang3.version>3.18.0</commons-lang3.version>
<scribejava-apis.version>8.3.3</scribejava-apis.version>
<java.version>17</java.version>
</properties>


@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<developers>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<properties>

pom.xml

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -51,17 +51,17 @@
<dependency.alfresco-server-root.version>7.0.1</dependency.alfresco-server-root.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-core.version>5.1.5</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.1.5</dependency.alfresco-transform-service.version>
<dependency.alfresco-transform-core.version>5.2.2-A.1</dependency.alfresco-transform-core.version>
<dependency.alfresco-transform-service.version>4.2.2-A.1</dependency.alfresco-transform-service.version>
<dependency.alfresco-greenmail.version>7.0</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.33</dependency.acs-event-model.version>
<dependency.aspectj.version>1.9.22.1</dependency.aspectj.version>
<dependency.spring.version>6.1.14</dependency.spring.version>
<dependency.spring-security.version>6.3.8</dependency.spring-security.version>
<dependency.spring.version>6.2.8</dependency.spring.version>
<dependency.spring-security.version>6.3.9</dependency.spring-security.version>
<dependency.antlr.version>3.5.3</dependency.antlr.version>
<dependency.jackson.version>2.17.2</dependency.jackson.version>
<dependency.cxf.version>4.1.0</dependency.cxf.version>
<dependency.cxf.version>4.1.2</dependency.cxf.version>
<dependency.opencmis.version>1.0.0-jakarta-1</dependency.opencmis.version>
<dependency.webscripts.version>10.2</dependency.webscripts.version>
<dependency.bouncycastle.version>1.78.1</dependency.bouncycastle.version>
@@ -154,7 +154,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>23.6.0.6</tag>
<tag>23.6.0.21</tag>
</scm>
<distributionManagement>
@@ -417,9 +417,9 @@
<version>1.17.1</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.18.0</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<dependencies>


@@ -40,6 +40,7 @@
"items":
[
<#list results as row>
<#if row.item.hasPermission("Read")>
{
"type": "${row.item.typeShort}",
"parentType": "${row.item.parentTypeShort!""}",
@@ -75,6 +76,7 @@
"nodeRef": "${row.item.nodeRef}"<#if row.selectable?exists>,
"selectable" : ${row.selectable?string}</#if>
}<#if row_has_next>,</#if>
</#if>
</#list>
]
}


@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>23.6.0.6</version>
<version>23.6.0.21</version>
</parent>
<dependencies>
@@ -94,7 +94,6 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.17.0</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
@@ -742,10 +741,6 @@
<artifactId>reflections</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
</dependencies>
<build>


@@ -116,7 +116,7 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
private static final String SELECT_NODE_MAX_ID = "alfresco.node.select_NodeMaxId";
private static final String SELECT_NODE_INTERVAL_BY_TYPE = "alfresco.node.select_MinMaxNodeIdForNodeType";
private static final String SELECT_NODES_WITH_ASPECT_IDS = "alfresco.node.select_NodesWithAspectIds";
private static final String SELECT_NODES_WITH_ASPECT_IDS_LIMITED = "alfresco.node.select_NodesWithAspectIds_Limited";
private static final String SELECT_NODES_WITH_ASPECT_IDS_LIMITED = "alfresco.node.select.select_NodesWithAspectIds_Limited";
private static final String INSERT_NODE_ASSOC = "alfresco.node.insert.insert_NodeAssoc";
private static final String UPDATE_NODE_ASSOC = "alfresco.node.update_NodeAssoc";
private static final String DELETE_NODE_ASSOC = "alfresco.node.delete_NodeAssoc";
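The statement id gains a select. sub-namespace because the query itself moves out of the common node mapper and into dialect-specific mapper files later in this change set. A hedged sketch of how a DAO could address the relocated statement through MyBatis; the surrounding class and parameter object are illustrative only:

import java.util.List;

import org.mybatis.spring.SqlSessionTemplate;

// Illustrative only: the qualified id must match the dialect-specific mapper entry,
// otherwise MyBatis cannot resolve the statement at runtime.
class NodesWithAspectsQuery
{
    private static final String SELECT_NODES_WITH_ASPECT_IDS_LIMITED =
            "alfresco.node.select.select_NodesWithAspectIds_Limited";

    private final SqlSessionTemplate template;

    NodesWithAspectsQuery(SqlSessionTemplate template)
    {
        this.template = template;
    }

    List<Object> selectLimited(Object parameters)
    {
        return template.selectList(SELECT_NODES_WITH_ASPECT_IDS_LIMITED, parameters);
    }
}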


@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -125,6 +125,9 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
/** Number of (bytecode) instructions that will trigger the observer */
private int observerInstructionCount = 100;
/** Flag to enable or disable scope cleaning at the end of each script execution */
private boolean cleanScope = true;
/** Custom context factory */
public static AlfrescoContextFactory contextFactory;
@@ -209,6 +212,15 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
}
/**
* @param cleanScope
* true to enable scope cleaning at the end of each script execution - set to false to disable this feature.
*/
public void setCleanScope(boolean cleanScope)
{
this.cleanScope = cleanScope;
}
/**
* @see org.alfresco.service.cmr.repository.ScriptProcessor#reset()
*/
public void reset()
@@ -614,7 +626,7 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
}
finally
{
if (!secure)
if (!secure && cleanScope)
{
unsetScope(model, scope);
}
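The new cleanScope flag is fed from scripts.execution.clean.scope through the script-services bean definition shown further down. A minimal sketch of toggling it on the processor directly; the configuration class here is illustrative, and in the product the value arrives via Spring property injection:

import org.alfresco.repo.jscript.RhinoScriptProcessor;

public class ScriptScopeCleaningExample
{
    public static void configure(RhinoScriptProcessor processor, boolean keepScopeBetweenExecutions)
    {
        // cleanScope=true (the default) unsets the scope after each non-secure script run;
        // false restores the previous behaviour of leaving the scope populated.
        processor.setCleanScope(!keepScopeBetweenExecutions);
    }
}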


@@ -81,11 +81,19 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
public static final QName DEFAULT_RENDITION_CONTENT_PROP = ContentModel.PROP_CONTENT;
public static final String DEFAULT_MIMETYPE = MimetypeMap.MIMETYPE_TEXT_PLAIN;
public static final String MIMETYPE_METADATA_EXTRACT = "alfresco-metadata-extract";
public static final String MIMETYPE_METADATA_EMBED = "alfresco-metadata-embed";
public static final String DEFAULT_ENCODING = "UTF-8";
public static final int SOURCE_HAS_NO_CONTENT = -1;
public static final int RENDITION2_DOES_NOT_EXIST = -2;
// Allowed mimetypes to support text or metadata extract transforms when thumbnails are disabled.
private static final Set<String> ALLOWED_MIMETYPES = Set.of(
MimetypeMap.MIMETYPE_TEXT_PLAIN,
MIMETYPE_METADATA_EXTRACT,
MIMETYPE_METADATA_EMBED);
private static Log logger = LogFactory.getLog(RenditionService2Impl.class);
// As Async transforms and renditions are so similar, this class provides a way to provide the code that is different.
@@ -288,7 +296,7 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
{
try
{
if (!isEnabled())
if (!isAsyncAllowed(renderOrTransform))
{
throw new RenditionService2Exception("Async transforms and renditions are disabled " +
"(system.thumbnail.generate=false or renditionService2.enabled=false).");
@@ -967,4 +975,23 @@ public class RenditionService2Impl implements RenditionService2, InitializingBea
}
}
}
// Checks if the given transform callback is a text extract transform for content indexing or metadata extract/embed.
private boolean isTextOrMetadataExtractTransform(RenderOrTransformCallBack renderOrTransform)
{
RenditionDefinition2 renditionDefinition = renderOrTransform.getRenditionDefinition();
return renditionDefinition != null && ALLOWED_MIMETYPES.contains(renditionDefinition.getTargetMimetype());
}
private boolean isAsyncAllowed(RenderOrTransformCallBack renderOrTransform)
{
// If enabled is false, all async transforms/renditions must be blocked
if (!enabled)
{
return false;
}
// If thumbnails are disabled, allow only text extract or metadata extract/embed transforms
return thumbnailsEnabled || isTextOrMetadataExtractTransform(renderOrTransform);
}
}
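With renditionService2.enabled=true but system.thumbnail.generate=false, only targets in ALLOWED_MIMETYPES pass the new isAsyncAllowed gate. A short sketch reusing the TransformDefinition constructor shape from the integration test further down; the reply queue, client data and request id values are invented for illustration:

import java.util.Collections;

import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.rendition2.TransformDefinition;

public class AsyncTransformGateExample
{
    public static void main(String[] args)
    {
        // Still allowed when thumbnails are disabled: plain-text extraction for indexing.
        TransformDefinition textExtract = new TransformDefinition(
                MimetypeMap.MIMETYPE_TEXT_PLAIN,
                Collections.emptyMap(),
                "clientData",             // illustrative client data
                "org.example.replyQueue", // illustrative reply queue
                "requestId");             // illustrative request id

        // An image rendition target (e.g. image/png) would be rejected by the same gate,
        // since it is neither text/plain nor a metadata extract/embed pseudo-mimetype.
        System.out.println("Async text extract requested for: " + MimetypeMap.MIMETYPE_TEXT_PLAIN);
    }
}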


@@ -74,7 +74,7 @@ import com.nimbusds.oauth2.sdk.id.Identifier;
import com.nimbusds.oauth2.sdk.id.Issuer;
import com.nimbusds.openid.connect.sdk.claims.PersonClaims;
import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hc.client5.http.classic.HttpClient;


@@ -42,7 +42,7 @@ import jakarta.servlet.http.HttpServletResponse;
import com.nimbusds.oauth2.sdk.Scope;
import com.nimbusds.oauth2.sdk.id.Identifier;
import com.nimbusds.oauth2.sdk.id.State;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.oauth2.client.registration.ClientRegistration;


@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -28,9 +28,6 @@ package org.alfresco.repo.transaction;
import java.lang.reflect.Method;
import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.springframework.transaction.PlatformTransactionManager;
@@ -38,6 +35,10 @@ import org.springframework.transaction.TransactionManager;
import org.springframework.transaction.interceptor.TransactionAspectSupport;
import org.springframework.transaction.interceptor.TransactionAttribute;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;
/**
* @author Dmitry Velichkevich
*/
@@ -64,7 +65,7 @@ public class RetryingTransactionInterceptor extends TransactionAspectSupport imp
final TransactionAttribute txnAttr = getTransactionAttributeSource().getTransactionAttribute(
method, invocation.getThis().getClass());
final TransactionManager tm = determineTransactionManager(txnAttr);
final TransactionManager tm = determineTransactionManager(txnAttr, null);
if (tm != null && !(tm instanceof PlatformTransactionManager))
{
@@ -83,8 +84,7 @@ public class RetryingTransactionInterceptor extends TransactionAspectSupport imp
retryingTransactionHelper.setExtraExceptions(extraExceptions);
return retryingTransactionHelper.doInTransaction(
new RetryingTransactionCallback<Object>()
{
new RetryingTransactionCallback<Object>() {
public Object execute()
{
TransactionInfo txInfo = createTransactionIfNecessary(ptm, TransactionAttribute.PROPAGATION_REQUIRES_NEW == txnAttr


@@ -782,25 +782,6 @@
<if test="ordered == true">order by node.id ASC</if>
</select>
<select id="select_NodesWithAspectIds_Limited" parameterType="Ids" resultMap="result_NodeRef" >
select
node.id as id,
store.protocol as protocol,
store.identifier as identifier,
node.uuid as uuid
from
alf_node_aspects na
join alf_node node on (na.node_id = node.id)
left join alf_store store on (store.id = node.store_id)
where
<![CDATA[na.node_id >= #{idOne}]]>
<if test="idTwo != null"><![CDATA[and na.node_id < #{idTwo}]]></if>
and na.qname_id in
<foreach item="item" index="i" collection="ids" open="(" separator="," close=")">#{item}</foreach>
<if test="ordered == true">order by node.id ASC</if>
<if test="maxResults != null"><![CDATA[limit #{maxResults}]]></if>
</select>
<!-- Common results for result_NodeAssoc -->
<sql id="select_NodeAssoc_Results">
select
@@ -1566,4 +1547,4 @@
</foreach>
</delete>
</mapper>
</mapper>


@@ -30,4 +30,23 @@
<![CDATA[and commit_time_ms <= #{maxCommitTime}]]>
</select>
</mapper>
<select id="select_NodesWithAspectIds_Limited" parameterType="Ids" resultMap="alfresco.node.result_NodeRef" >
select
node.id as id,
store.protocol as protocol,
store.identifier as identifier,
node.uuid as uuid
from
alf_node_aspects na
join alf_node node on (na.node_id = node.id)
left join alf_store store on (store.id = node.store_id)
where
<![CDATA[na.node_id >= #{idOne}]]>
<if test="idTwo != null"><![CDATA[and na.node_id < #{idTwo}]]></if>
and na.qname_id in
<foreach item="item" index="i" collection="ids" open="(" separator="," close=")">#{item}</foreach>
<if test="ordered == true">order by node.id ASC</if>
<if test="maxResults != null"><![CDATA[limit #{maxResults}]]></if>
</select>
</mapper>


@@ -30,4 +30,23 @@
<![CDATA[and commit_time_ms <= #{maxCommitTime}]]>
</select>
</mapper>
<select id="select_NodesWithAspectIds_Limited" parameterType="Ids" resultMap="alfresco.node.result_NodeRef" >
select
node.id as id,
store.protocol as protocol,
store.identifier as identifier,
node.uuid as uuid
from
alf_node_aspects na
join alf_node node on (na.node_id = node.id)
left join alf_store store on (store.id = node.store_id)
where
<![CDATA[na.node_id >= #{idOne}]]>
<if test="idTwo != null"><![CDATA[and na.node_id < #{idTwo}]]></if>
and na.qname_id in
<foreach item="item" index="i" collection="ids" open="(" separator="," close=")">#{item}</foreach>
<if test="ordered == true">order by node.id ASC</if>
<if test="maxResults != null"><![CDATA[limit #{maxResults}]]></if>
</select>
</mapper>


@@ -1394,6 +1394,9 @@ scripts.execution.maxMemoryUsedInBytes=-1
# Number of instructions that will trigger the observer
scripts.execution.observerInstructionCount=5000
# Flag to control if the scope is cleaned at the end of script execution
scripts.execution.clean.scope=true
# Default value being used in POST/size-details endpoint to partition a huge folder into smaller chunks
# so that we can compute more efficiently and consolidate all sizes into a single unit.
default.async.folder.items=1000


@@ -60,6 +60,9 @@
<property name="observerInstructionCount">
<value>${scripts.execution.observerInstructionCount}</value>
</property>
<property name="cleanScope">
<value>${scripts.execution.clean.scope}</value>
</property>
</bean>
<!-- base config implementation that script extension beans extend from - for auto registration


@@ -26,6 +26,12 @@
package org.alfresco.repo.event2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.alfresco.model.ContentModel.PROP_DESCRIPTION;
import java.io.Serializable;
@@ -35,6 +41,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.dictionary.M2Model;
@@ -53,7 +62,6 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.GUID;
import org.alfresco.util.Pair;
import org.junit.Test;
/**
* @author Iulian Aftene
@@ -66,20 +74,20 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
public void testUpdateNodeResourceContent()
{
ContentService contentService = (ContentService) applicationContext.getBean(
"contentService");
"contentService");
final NodeRef nodeRef = createNode(ContentModel.TYPE_CONTENT);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(),
resultRepoEvent.getType());
resultRepoEvent.getType());
NodeResource resource = getNodeResource(resultRepoEvent);
assertNull("Content should have been null.", resource.getContent());
retryingTransactionHelper.doInTransaction(() -> {
ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.TYPE_CONTENT,
true);
true);
writer.setMimetype(MimetypeMap.MIMETYPE_PDF);
writer.setEncoding("UTF-8");
writer.putContent("test content.");
@@ -90,7 +98,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
resultRepoEvent = getRepoEvent(2);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
resultRepoEvent.getType());
resultRepoEvent.getType());
resource = getNodeResource(resultRepoEvent);
ContentInfo content = resource.getContent();
@@ -105,7 +113,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
// Update the content again
retryingTransactionHelper.doInTransaction(() -> {
ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.TYPE_CONTENT,
true);
true);
writer.setMimetype(MimetypeMap.MIMETYPE_PDF);
writer.setEncoding("UTF-8");
writer.putContent("A quick brown fox jumps over the lazy dog.");
@@ -370,7 +378,6 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("new test title", title);
assertEquals("new test title", getLocalizedProperty(resource, "cm:title", defaultLocale));
resourceBefore = getNodeResourceBefore(3);
title = getProperty(resourceBefore, "cm:title");
assertEquals("Wrong old property.", "test title", title);
@@ -490,14 +497,14 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
resource = getNodeResource(2);
assertNotNull(resource.getAspectNames());
assertTrue(resource.getAspectNames().contains("cm:versionable"));
//Check all aspects
// Check all aspects
Set<String> expectedAspects = new HashSet<>(originalAspects);
expectedAspects.add("cm:versionable");
assertEquals(expectedAspects, resource.getAspectNames());
// Check properties
assertFalse(resource.getProperties().isEmpty());
//Check resourceBefore
// Check resourceBefore
NodeResource resourceBefore = getNodeResourceBefore(2);
assertNotNull(resourceBefore.getAspectNames());
assertEquals(originalAspects, resourceBefore.getAspectNames());
@@ -544,21 +551,64 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals(aspectsBeforeRemove, resourceBefore.getAspectNames());
}
@Test
public void testRemoveAspectPropertiesTest()
{
final NodeRef nodeRef = createNode(ContentModel.TYPE_CONTENT);
NodeResource resource = getNodeResource(1);
final Set<String> originalAspects = resource.getAspectNames();
assertNotNull(originalAspects);
// Add cm:geographic aspect with properties
retryingTransactionHelper.doInTransaction(() -> {
Map<QName, Serializable> aspectProperties = new HashMap<>();
aspectProperties.put(ContentModel.PROP_LATITUDE, "12.345678");
aspectProperties.put(ContentModel.PROP_LONGITUDE, "12.345678");
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, aspectProperties);
return null;
});
resource = getNodeResource(2);
Set<String> aspectsBeforeRemove = resource.getAspectNames();
assertNotNull(aspectsBeforeRemove);
assertTrue(aspectsBeforeRemove.contains("cm:geographic"));
// Remove cm:geographic aspect - this automatically removes the properties from the node
retryingTransactionHelper.doInTransaction(() -> {
nodeService.removeAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC);
return null;
});
resource = getNodeResource(3);
assertEquals(originalAspects, resource.getAspectNames());
NodeResource resourceBefore = getNodeResourceBefore(3);
assertNotNull(resourceBefore.getAspectNames());
assertEquals(aspectsBeforeRemove, resourceBefore.getAspectNames());
// Resource before should contain cm:latitude and cm:longitude properties
assertNotNull(resourceBefore.getProperties());
assertTrue(resourceBefore.getProperties().containsKey("cm:latitude"));
assertTrue(resourceBefore.getProperties().containsKey("cm:longitude"));
// Resource after should NOT contain cm:latitude and cm:longitude properties
assertNotNull(resource.getProperties());
assertFalse(resource.getProperties().containsKey("cm:latitude"));
assertFalse(resource.getProperties().containsKey("cm:longitude"));
}
@Test
public void testCreateAndUpdateInTheSameTransaction()
{
retryingTransactionHelper.doInTransaction(() -> {
NodeRef node1 = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE, GUID.generate()),
ContentModel.TYPE_CONTENT).getChildRef();
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE, GUID.generate()),
ContentModel.TYPE_CONTENT).getChildRef();
nodeService.setProperty(node1, PROP_DESCRIPTION, "test description");
return null;
});
//Create and update node are done in the same transaction so one event is expected
// Create and update node are done in the same transaction so one event is expected
// to be generated
checkNumOfEvents(1);
}
@@ -593,8 +643,8 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("Incorrect node type was found", "cm:folder", nodeResource.getNodeType());
NodeResource resourceBefore = getNodeResourceBefore(2);
assertEquals("Incorrect node type was found","cm:content", resourceBefore.getNodeType());
// assertNotNull(resourceBefore.getModifiedAt()); uncomment this when the issue will be fixed
assertEquals("Incorrect node type was found", "cm:content", resourceBefore.getNodeType());
// assertNotNull(resourceBefore.getModifiedAt()); uncomment this when the issue will be fixed
assertNull(resourceBefore.getId());
assertNull(resourceBefore.getContent());
assertNull(resourceBefore.isFile());
@@ -624,8 +674,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
m2Type.setTitle("Test type title");
// Create active model
CustomModelDefinition modelDefinition =
retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
CustomModelDefinition modelDefinition = retryingTransactionHelper.doInTransaction(() -> customModelService.createCustomModel(model, true));
assertNotNull(modelDefinition);
assertEquals(modelName, modelDefinition.getName().getLocalName());
@@ -655,7 +704,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
assertEquals("cm:content node type was not found", "cm:content", nodeResource.getNodeType());
QName typeQName = QName.createQName("{" + namespacePair.getFirst()+ "}" + typeName);
QName typeQName = QName.createQName("{" + namespacePair.getFirst() + "}" + typeName);
retryingTransactionHelper.doInTransaction(() -> {
nodeService.setType(nodeRef, typeQName);
@@ -757,7 +806,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
// we should have only 1 event, node.Created
checkNumOfEvents(1);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
RepoEvent<EventData<NodeResource>> resultRepoEvent = getRepoEvent(1);
assertEquals("Wrong repo event type.", EventType.NODE_CREATED.getType(), resultRepoEvent.getType());
NodeResource nodeResource = getNodeResource(resultRepoEvent);
assertEquals("Incorrect node type was found", "cm:folder", nodeResource.getNodeType());
@@ -783,10 +832,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -801,7 +850,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("Wrong node parent.", folder1ID, moveFileParentBeforeMove);
assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
getRepoEvent(4).getType());
getRepoEvent(4).getType());
assertNull(resourceBefore.getId());
assertNull(resourceBefore.getName());
@@ -833,10 +882,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
moveFolder,
grandParent,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
moveFolder,
grandParent,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -845,15 +894,13 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
final String grandParentID = getNodeResource(1).getId();
final String parentID = getNodeResource(2).getId();
final String moveFolderParentBeforeMove =
getNodeResourceBefore(4).getPrimaryHierarchy().get(0);
final String moveFolderParentAfterMove =
getNodeResource(4).getPrimaryHierarchy().get(0);
final String moveFolderParentBeforeMove = getNodeResourceBefore(4).getPrimaryHierarchy().get(0);
final String moveFolderParentAfterMove = getNodeResource(4).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", parentID, moveFolderParentBeforeMove);
assertEquals("Wrong node parent.", grandParentID, moveFolderParentAfterMove);
assertEquals("Wrong repo event type.", EventType.NODE_UPDATED.getType(),
getRepoEventWithoutWait(4).getType());
getRepoEventWithoutWait(4).getType());
}
@Test
@@ -867,28 +914,25 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
grandParent,
root2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
grandParent,
root2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
checkNumOfEvents(6);
final String root2ID = getNodeResource(2).getId();
final String grandParentParentAfterMove =
getNodeResource(6).getPrimaryHierarchy().get(0);
final String grandParentParentAfterMove = getNodeResource(6).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", root2ID, grandParentParentAfterMove);
final String grandParentID = getNodeResource(3).getId();
final String parentIDOfTheParentFolder =
getNodeResource(4).getPrimaryHierarchy().get(0);
final String parentIDOfTheParentFolder = getNodeResource(4).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", grandParentID, parentIDOfTheParentFolder);
final String parentID = getNodeResource(4).getId();
final String contentParentID =
getNodeResource(5).getPrimaryHierarchy().get(0);
final String contentParentID = getNodeResource(5).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", parentID, contentParentID);
}
@@ -906,10 +950,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {
nodeService.moveNode(
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -918,8 +962,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertTrue("Wrong aspect.", resource.getAspectNames().contains("cm:versionable"));
final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(5).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(5).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -935,10 +978,10 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
nodeService.setProperty(moveFile, ContentModel.PROP_NAME, "test_new_name");
nodeService.moveNode(
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
moveFile,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
return null;
});
@@ -946,8 +989,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
assertEquals("test_new_name", resource.getName());
final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(4).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(4).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -958,28 +1000,28 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
retryingTransactionHelper.doInTransaction(() -> {
NodeRef folder1 = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();
NodeRef folder2 = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_FOLDER).getChildRef();
NodeRef fileToMove = nodeService.createNode(
folder1,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_CONTENT).getChildRef();
folder1,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE),
ContentModel.TYPE_CONTENT).getChildRef();
nodeService.moveNode(
fileToMove,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
fileToMove,
folder2,
ContentModel.ASSOC_CONTAINS,
QName.createQName(TEST_NAMESPACE));
assertEquals(folder2, nodeService.getPrimaryParent(fileToMove).getParentRef());
@@ -989,8 +1031,7 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
checkNumOfEvents(3);
final String folder2ID = getNodeResource(2).getId();
final String moveFileParentAfterMove =
getNodeResource(3).getPrimaryHierarchy().get(0);
final String moveFileParentAfterMove = getNodeResource(3).getPrimaryHierarchy().get(0);
assertEquals("Wrong node parent.", folder2ID, moveFileParentAfterMove);
}
@@ -1003,7 +1044,6 @@ public class UpdateRepoEventIT extends AbstractContextAwareRepoEvent
final Set<String> originalAspects = resource.getAspectNames();
assertNotNull(originalAspects);
retryingTransactionHelper.doInTransaction(() -> {
// Add cm:geographic aspect with default value
nodeService.addAspect(nodeRef, ContentModel.ASPECT_GEOGRAPHIC, null);


@@ -39,6 +39,7 @@ import org.junit.Test;
import org.alfresco.model.ContentModel;
import org.alfresco.model.RenditionModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.permissions.AccessDeniedException;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
@@ -776,4 +777,57 @@ public class RenditionService2IntegrationTest extends AbstractRenditionIntegrati
}
}
@Test
public void testTextExtractTransformAllowedWhenThumbnailDisabled()
{
// create a source node
NodeRef sourceNodeRef = createSource(ADMIN, "quick.pdf");
assertNotNull("Node not generated", sourceNodeRef);
String replyQueue = "org.test.queue";
String targetMimetype = MimetypeMap.MIMETYPE_TEXT_PLAIN;
TransformDefinition textExtractTransform = new TransformDefinition(
targetMimetype,
java.util.Collections.emptyMap(),
"clientData",
replyQueue,
"requestId");
renditionService2.setThumbnailsEnabled(false);
try
{
// Should NOT throw, as this is a text extract transform
AuthenticationUtil.runAs(() -> {
transactionService.getRetryingTransactionHelper().doInTransaction(() -> {
renditionService2.transform(sourceNodeRef, textExtractTransform);
return null;
});
return null;
}, ADMIN);
}
finally
{
renditionService2.setThumbnailsEnabled(true);
}
}
@Test
public void testMetadataExtractTransformAllowedWhenThumbnailDisabled()
{
// create a source node
NodeRef sourceNodeRef = createSource(ADMIN, "quick.pdf");
assertNotNull("Node not generated", sourceNodeRef);
renditionService2.setThumbnailsEnabled(false);
try
{
// Should NOT throw, as this is a metadata extract transform
extract(ADMIN, sourceNodeRef);
waitForExtract(ADMIN, sourceNodeRef, true);
}
finally
{
renditionService2.setThumbnailsEnabled(true);
}
}
}


@@ -1,55 +1,54 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.util;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Base test class providing Hibernate sessions.
* <p>
* By default this is auto-wired by type. If this is going to
* result in a conflict then use auto-wire by name. This can be done by
* setting populateProtectedVariables to true in the constructor and
* then adding protected members with the same name as the bean you require.
*
* @author Derek Hulley
*/
@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:alfresco/application-context.xml"})
public abstract class BaseSpringTest extends TestCase
{
public Log logger = LogFactory.getLog(getClass().getName());
@Autowired
protected ApplicationContext applicationContext;
}
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2025 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.util;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextCustomizerFactories;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Base test class providing Hibernate sessions.
* <p>
* By default this is auto-wired by type. If this is going to result in a conflict then use auto-wire by name. This can be done by setting populateProtectedVariables to true in the constructor and then adding protected members with the same name as the bean you require.
*
* @author Derek Hulley
*/
@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:alfresco/application-context.xml"})
@ContextCustomizerFactories(factories = {}, mergeMode = ContextCustomizerFactories.MergeMode.REPLACE_DEFAULTS)
public abstract class BaseSpringTest extends TestCase
{
public Log logger = LogFactory.getLog(getClass().getName());
@Autowired
protected ApplicationContext applicationContext;
}
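A minimal sketch of a concrete test built on the reworked base class, assuming the default classpath:alfresco/application-context.xml context; the bean lookup shown is illustrative:

import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.util.BaseSpringTest;

// Illustrative subclass: BaseSpringTest autowires the Spring ApplicationContext,
// so JUnit 3 style test methods can pull beans from it directly.
public class ExampleRepositoryTest extends BaseSpringTest
{
    public void testNodeServiceIsAvailable()
    {
        NodeService nodeService = (NodeService) applicationContext.getBean("NodeService");
        assertNotNull("NodeService bean should be present in the application context", nodeService);
    }
}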