Merge release/V2.3 into feature-2.3/RM2526_BlendedDispositionSchedule.

Tom Page
2016-10-10 14:34:36 +01:00
28 changed files with 1875 additions and 468 deletions

View File

@@ -77,3 +77,10 @@ rm.patch.v22.convertToStandardFilePlan=false
# a document will be auto-versioned when its type is changed.
#
version.store.enableAutoVersionOnTypeChange=false
#
# Enable a new auto-version to be created when the current document differs from its latest recorded state,
# so that the created version record matches the current document state;
# otherwise the version record is created from the version history
#
rm.enableAutoVersionOnRecordCreation=false
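
For illustration, a deployment that wants version records to always reflect the current document state would switch this flag on. A minimal sketch, assuming the usual alfresco-global.properties override mechanism (only the property name comes from this change set):

# illustrative override: auto-version documents with unversioned changes at record creation time
rm.enableAutoVersionOnRecordCreation=true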

View File

@@ -19,3 +19,4 @@ rm.service.close-record-folder-not-folder=The record folder couldn't be closed b
rm.service.node-has-aspect=The node {0} already has the aspect {1}.
rm.service.final-version=Final
rm.service.final-version-description=The final archived record version
rm.service.enable-autoversion-on-record-creation=Auto Version on Record Creation

View File

@@ -260,6 +260,7 @@
<property name="capabilityService" ref="CapabilityService" />
<property name="recordableVersionService" ref="RecordableVersionService" />
<property name="eagerContentStoreCleaner" ref="eagerContentStoreCleaner"/>
<property name="inplaceRecordService" ref="InplaceRecordService"/>
<property name="ghostingEnabled">
<value>${rm.ghosting.enabled}</value>
</property>
@@ -761,7 +762,6 @@
<property name="filePlanService" ref="FilePlanService" />
<property name="publicAction" value="true"/>
<property name="allowParameterSubstitutions" value="true"/>
<property name="retryingTransactionHelper" ref="retryingTransactionHelper" />
</bean>
<!-- Copy To -->

View File

@@ -145,6 +145,7 @@
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getLastCompletedDispostionAction=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.isDisposableItemCutoff=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.cutoffDisposableItem=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.calculateAsOfDate=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.*=RM_DENY
]]>
</value>

View File

@@ -21,6 +21,7 @@
<!-- extended version service bean definition -->
<bean id="rm.versionService" abstract="true" class="org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionServiceImpl">
<property name="filePlanService" ref="FilePlanService" />
<property name="enableAutoVersionOnRecordCreation" value="${rm.enableAutoVersionOnRecordCreation}" />
<property name="authenticationUtil" ref="rm.authenticationUtil" />
<property name="relationshipService" ref="RelationshipService" />
<property name="recordService" ref="RecordService" />
@@ -28,6 +29,7 @@
<property name="cmObjectType" ref="cm.object" />
<property name="extendedPermissionService" ref="ExtendedPermissionService" />
<property name="extendedSecurityService" ref="ExtendedSecurityService" />
</bean>
<bean class="org.alfresco.util.BeanExtender">
<property name="beanName" value="versionService" />
@@ -77,6 +79,7 @@
<value>
<![CDATA[
org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService.isCurrentVersionRecorded=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService.isEnableAutoVersionOnRecordCreation=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService.isRecordedVersion=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService.getVersionRecord=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService.getRecordedVersion=RM_ALLOW

View File

@@ -598,6 +598,8 @@
<property name="permissionService" ref="permissionService"/>
<property name="extendedSecurityService" ref="extendedSecurityService"/>
<property name="transactionService" ref="transactionService"/>
<property name="contentStreamer" ref="webscript.content.streamer" />
<property name="fileFolderService" ref="FileFolderService"/>
</bean>
<!-- REST impl for GET Holds -->

View File

@@ -2,11 +2,13 @@
<shortname>Removes dynamic authorities</shortname>
<description><![CDATA[
Removes dynamic authorities from in place records created in previous versions.<br/>
URL parameter batchsize is mandatory, and represents the number of records that are processed in one transaction.<br/>
URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.<br/>
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.<br/>
URL parameter export is optional, and if its value is true, the processed records will be exported to a CSV file.<br/>
URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.<br/>
]]>
</description>
<url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}</url>
<url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}&amp;export={export?}&amp;parentNodeRef={parentNodeRef?}</url>
<format default="json">argument</format>
<authentication>admin</authentication>
<transaction allow="readonly">required</transaction>
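
For illustration, an administrator might invoke the web script with the new optional parameters as follows; the /alfresco/service prefix and all parameter values are placeholders rather than part of this change:

GET /alfresco/service/api/rm/rm-dynamicauthorities?batchsize=500&maxProcessedRecords=2000&export=true&parentNodeRef=workspace://SpacesStore/0a1b2c3d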

View File

@@ -237,17 +237,11 @@ public class BroadcastDispositionActionDefinitionUpdateAction extends RMActionEx
* @param dispositionActionDef The disposition action definition node
* @param nextAction The next disposition action
*/
private void persistPeriodChanges(NodeRef dispositionActionDef, DispositionAction nextAction)
protected void persistPeriodChanges(NodeRef dispositionActionDef, DispositionAction nextAction)
{
Date newAsOfDate = null;
Period dispositionPeriod = (Period) getNodeService().getProperty(dispositionActionDef, PROP_DISPOSITION_PERIOD);
if (dispositionPeriod != null)
{
// calculate the new as of date as we have been provided a new period
Date now = new Date();
newAsOfDate = dispositionPeriod.getNextDate(now);
}
NodeRef dispositionedNode = getNodeService().getPrimaryParent(nextAction.getNodeRef()).getParentRef();
DispositionActionDefinition definition = nextAction.getDispositionActionDefinition();
Date newAsOfDate = getDispositionService().calculateAsOfDate(dispositionedNode, definition, false);
if (logger.isDebugEnabled())
{

View File

@@ -17,12 +17,10 @@ import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileNotFoundException;
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.util.StringUtils;
/**
@@ -35,9 +33,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
{
private static Log logger = LogFactory.getLog(CopyMoveLinkFileToBaseAction.class);
/** Retrying transaction helper */
private RetryingTransactionHelper retryingTransactionHelper;
/** action parameters */
public static final String PARAM_DESTINATION_RECORD_FOLDER = "destinationRecordFolder";
public static final String PARAM_PATH = "path";
@@ -94,14 +89,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
this.filePlanService = filePlanService;
}
/**
* @param retryingTransactionHelper retrying transaction helper
*/
public void setRetryingTransactionHelper(RetryingTransactionHelper retryingTransactionHelper)
{
this.retryingTransactionHelper = retryingTransactionHelper;
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.action.RMActionExecuterAbstractBase#addParameterDefinitions(java.util.List)
*/
@@ -138,25 +125,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
NodeRef recordFolder = (NodeRef)action.getParameterValue(PARAM_DESTINATION_RECORD_FOLDER);
if (recordFolder == null)
{
final boolean finaltargetIsUnfiledRecords = targetIsUnfiledRecords;
recordFolder = retryingTransactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<NodeRef>()
{
public NodeRef execute() throws Throwable
{
NodeRef result = null;
try
{
// get the reference to the record folder based on the relative path
result = createOrResolvePath(action, actionedUponNodeRef, finaltargetIsUnfiledRecords);
}
catch (DuplicateChildNodeNameException ex)
{
throw new ConcurrencyFailureException("Cannot create or resolve path.", ex);
}
return result;
}
}, false, true);
recordFolder = createOrResolvePath(action, actionedUponNodeRef, targetIsUnfiledRecords);
}
// now we have the reference to the target folder we can do some final checks to see if the action is valid
@@ -170,20 +139,17 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
{
try
{
synchronized (this)
if (getMode() == CopyMoveLinkFileToActionMode.MOVE)
{
if (getMode() == CopyMoveLinkFileToActionMode.MOVE)
{
fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null);
}
else if (getMode() == CopyMoveLinkFileToActionMode.COPY)
{
fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null);
}
else if (getMode() == CopyMoveLinkFileToActionMode.LINK)
{
getRecordService().link(actionedUponNodeRef, finalRecordFolder);
}
fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null);
}
else if (getMode() == CopyMoveLinkFileToActionMode.COPY)
{
fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null);
}
else if (getMode() == CopyMoveLinkFileToActionMode.LINK)
{
getRecordService().link(actionedUponNodeRef, finalRecordFolder);
}
}
catch (FileNotFoundException fileNotFound)
@@ -193,7 +159,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
return null;
}
});
}
}
@@ -283,23 +248,29 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
* @param targetisUnfiledRecords true if the target is in unfiled records
* @return
*/
private NodeRef createOrResolvePath(Action action, NodeRef actionedUponNodeRef, boolean targetisUnfiledRecords)
private NodeRef createOrResolvePath(final Action action, final NodeRef actionedUponNodeRef, final boolean targetisUnfiledRecords)
{
// get the starting context
NodeRef context = getContext(action, actionedUponNodeRef, targetisUnfiledRecords);
final NodeRef context = getContext(action, actionedUponNodeRef, targetisUnfiledRecords);
NodeRef path = context;
// get the path we wish to resolve
String pathParameter = (String)action.getParameterValue(PARAM_PATH);
String[] pathElementsArray = StringUtils.tokenizeToStringArray(pathParameter, "/", false, true);
final String[] pathElementsArray = StringUtils.tokenizeToStringArray(pathParameter, "/", false, true);
if((pathElementsArray != null) && (pathElementsArray.length > 0))
{
// get the create parameter
Boolean createValue = (Boolean)action.getParameterValue(PARAM_CREATE_RECORD_PATH);
boolean create = createValue == null ? false : createValue.booleanValue();
final boolean create = createValue == null ? false : createValue.booleanValue();
// create or resolve the specified path
path = createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false);
path = getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<NodeRef>()
{
public NodeRef execute() throws Throwable
{
return createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false);
}
}, false, true);
}
return path;
}
@@ -388,7 +359,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr
NodeRef child = getChild(parent, childName);
if (child == null)
{
if(targetisUnfiledRecords)
if (targetisUnfiledRecords)
{
// create unfiled folder
child = fileFolderService.create(parent, childName, RecordsManagementModel.TYPE_UNFILED_RECORD_FOLDER).getNodeRef();

View File

@@ -29,6 +29,7 @@ import org.alfresco.module.org_alfresco_module_rm.action.RMDispositionActionExec
import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.record.InplaceRecordService;
import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService;
import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner;
import org.alfresco.service.cmr.action.Action;
@@ -60,6 +61,9 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase
/** Recordable version service */
private RecordableVersionService recordableVersionService;
/** Inplace record service */
private InplaceRecordService inplaceRecordService;
/** Indicates if ghosting is enabled or not */
private boolean ghostingEnabled = true;
@@ -87,6 +91,14 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase
this.recordableVersionService = recordableVersionService;
}
/**
* @param inplaceRecordService inplace record service
*/
public void setInplaceRecordService(InplaceRecordService inplaceRecordService)
{
this.inplaceRecordService = inplaceRecordService;
}
/**
* @param ghostingEnabled true if ghosting is enabled, false otherwise
*/
@@ -170,6 +182,9 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase
recordableVersionService.destroyRecordedVersion(version);
}
// Hide from inplace users to give the impression of destruction
inplaceRecordService.hideRecord(record);
// Add the ghosted aspect
getNodeService().addAspect(record, ASPECT_GHOSTED, null);
}

View File

@@ -243,4 +243,15 @@ public interface DispositionService
* @return date
*/
Date getDispositionActionDate(NodeRef dispositionSchedule, String dispositionActionName);
/**
* Compute the "disposition as of" date (if necessary) for a disposition action and a node.
*
* @param nodeRef The node which the schedule applies to.
* @param dispositionActionDefinition The definition of the disposition action.
* @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property.
* @return The new "disposition as of" date.
*/
Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition,
boolean allowContextFromAsOf);
}
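
For illustration, a caller such as a custom RM action could delegate as-of date recalculation to the service instead of duplicating the period logic, mirroring the persistPeriodChanges change above. A minimal sketch, assuming the service references are injected elsewhere; the class name is hypothetical:

import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class AsOfDateRecalculationExample
{
    private DispositionService dispositionService;
    private NodeService nodeService;

    /** Recalculate the "disposition as of" date for the item that owns the given next action. */
    public Date recalculate(DispositionAction nextAction)
    {
        // the disposable item is the primary parent of the next disposition action node
        NodeRef dispositionedNode = nodeService.getPrimaryParent(nextAction.getNodeRef()).getParentRef();
        DispositionActionDefinition definition = nextAction.getDispositionActionDefinition();
        // false: do not take the context date from the existing "disposition as of" property
        return dispositionService.calculateAsOfDate(dispositionedNode, definition, false);
    }
}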

View File

@@ -54,8 +54,8 @@ import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.util.ParameterCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Disposition service implementation.
@@ -69,7 +69,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
RecordsManagementPolicies.OnFileRecord
{
/** Logger */
private static Log logger = LogFactory.getLog(DispositionServiceImpl.class);
private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class);
/** Transaction mode for setting next action */
public enum WriteMode {READ_ONLY, DATE_ONLY, DATE_AND_NAME};
@@ -208,7 +208,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
DispositionActionDefinition nextDispositionActionDefinition = dispositionActionDefinitions.get(0);
// initialise the details of the next disposition action
initialiseDispositionAction(nodeRef, nextDispositionActionDefinition);
initialiseDispositionAction(nodeRef, nextDispositionActionDefinition, true);
}
}
}
@@ -400,12 +400,10 @@ public class DispositionServiceImpl extends ServiceBaseImpl
{
// TODO in the future we should be able to support disposition schedule reuse, but for now just warn that
// only the first disposition schedule will be considered
if (logger.isWarnEnabled())
{
logger.warn("Disposition schedule has more than one associated records management container. " +
"This is not currently supported so only the first container will be considered. " +
"(dispositionScheduleNodeRef=" + dispositionSchedule.getNodeRef().toString() + ")");
}
LOGGER.warn("Disposition schedule has more than one associated records management container. "
+ "This is not currently supported so only the first container will be considered. "
+ "(dispositionScheduleNodeRef={})",
dispositionSchedule.getNodeRef().toString());
}
// Get the container reference
@@ -631,42 +629,16 @@ public class DispositionServiceImpl extends ServiceBaseImpl
* Initialises the details of the next disposition action based on the details of a disposition
* action definition.
*
* @param nodeRef node reference
* @param dispositionActionDefinition disposition action definition
* @param nodeRef node reference
* @param dispositionActionDefinition disposition action definition
* @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property.
*/
private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition)
private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf)
{
// Create the properties
Map<QName, Serializable> props = new HashMap<QName, Serializable>(10);
// Calculate the asOf date
Date asOfDate = null;
Period period = dispositionActionDefinition.getPeriod();
if (period != null)
{
Date contextDate = null;
// Get the period properties value
QName periodProperty = dispositionActionDefinition.getPeriodProperty();
if (periodProperty != null)
{
// doesn't matter if the period property isn't set ... the asOfDate will get updated later
// when the value of the period property is set
contextDate = (Date)this.nodeService.getProperty(nodeRef, periodProperty);
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
}
// Calculate the as of date
if (contextDate != null)
{
asOfDate = period.getNextDate(contextDate);
}
}
Date asOfDate = calculateAsOfDate(nodeRef, dispositionActionDefinition, allowContextFromAsOf);
// Set the property values
props.put(PROP_DISPOSITION_ACTION_ID, dispositionActionDefinition.getId());
@@ -694,6 +666,50 @@ public class DispositionServiceImpl extends ServiceBaseImpl
}
}
/**
* Compute the "disposition as of" date (if necessary) for a disposition action and a node.
*
* @param nodeRef The node which the schedule applies to.
* @param dispositionActionDefinition The definition of the disposition action.
* @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property.
* @return The new "disposition as of" date.
*/
@Override
public Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition,
boolean allowContextFromAsOf)
{
// Calculate the asOf date
Date asOfDate = null;
Period period = dispositionActionDefinition.getPeriod();
if (period != null)
{
Date contextDate = null;
// Get the period properties value
QName periodProperty = dispositionActionDefinition.getPeriodProperty();
if (periodProperty != null && (allowContextFromAsOf
|| !RecordsManagementModel.PROP_DISPOSITION_AS_OF.equals(periodProperty)))
{
// doesn't matter if the period property isn't set ... the asOfDate will get updated later
// when the value of the period property is set
contextDate = (Date)this.nodeService.getProperty(nodeRef, periodProperty);
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
}
// Calculate the as of date
if (contextDate != null)
{
asOfDate = period.getNextDate(contextDate);
}
}
return asOfDate;
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#isNextDispositionActionEligible(org.alfresco.service.cmr.repository.NodeRef)
*/
@@ -919,63 +935,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
nodeService.addAspect(nodeRef, ASPECT_DISPOSITION_LIFECYCLE, null);
}
// Create the properties
Map<QName, Serializable> props = new HashMap<QName, Serializable>(10);
// Calculate the asOf date
Date asOfDate = null;
Period period = nextDispositionActionDefinition.getPeriod();
if (period != null)
{
Date contextDate = null;
// Get the period properties value
QName periodProperty = nextDispositionActionDefinition.getPeriodProperty();
if (periodProperty != null &&
!RecordsManagementModel.PROP_DISPOSITION_AS_OF.equals(periodProperty))
{
// doesn't matter if the period property isn't set ... the asOfDate will get updated later
// when the value of the period property is set
contextDate = (Date) nodeService.getProperty(nodeRef, periodProperty);
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
}
// Calculate the as of date
if (contextDate != null)
{
asOfDate = period.getNextDate(contextDate);
}
}
// Set the property values
props.put(PROP_DISPOSITION_ACTION_ID, nextDispositionActionDefinition.getId());
props.put(PROP_DISPOSITION_ACTION, nextDispositionActionDefinition.getName());
if (asOfDate != null)
{
props.put(PROP_DISPOSITION_AS_OF, asOfDate);
}
// Create a new disposition action object
NodeRef dispositionActionNodeRef = nodeService.createNode(
nodeRef,
ASSOC_NEXT_DISPOSITION_ACTION,
ASSOC_NEXT_DISPOSITION_ACTION,
TYPE_DISPOSITION_ACTION,
props).getChildRef();
DispositionAction da = new DispositionActionImpl(serviceRegistry, dispositionActionNodeRef);
// Create the events
List<RecordsManagementEvent> events = nextDispositionActionDefinition.getEvents();
for (RecordsManagementEvent event : events)
{
// For every event create an entry on the action
da.addEventCompletionDetails(event);
}
initialiseDispositionAction(nodeRef, nextDispositionActionDefinition, false);
}
}

View File

@@ -1038,6 +1038,8 @@ public class RecordServiceImpl extends BaseBehaviourBean
{
NodeRef versionRecord = null;
recordableVersionService.createSnapshotVersion(nodeRef);
// wire record up to previous record
VersionHistory versionHistory = versionService.getVersionHistory(nodeRef);
if (versionHistory != null)

View File

@@ -34,25 +34,24 @@ public interface RecordableVersionService
* <p>
* Returns false if not versionable or no version.
*
* @param nodeRef node reference
* @return boolean true if latest version recorded, false otherwise
* @param nodeRef node reference
* @return boolean true if latest version recorded, false otherwise
*/
boolean isCurrentVersionRecorded(NodeRef nodeRef);
/**
* Indicates whether a version is recorded or not.
*
* @param version version
* @return boolean true if recorded version, false otherwise
* @param version version
* @return boolean true if recorded version, false otherwise
*/
boolean isRecordedVersion(Version version);
/**
* If the version is a recorded version, gets the related version
* record.
* If the version is a recorded version, gets the related version record.
*
* @param version version
* @return NodeRef node reference of version record
* @param version version
* @return NodeRef node reference of version record
*/
NodeRef getVersionRecord(Version version);
@@ -60,37 +59,61 @@ public interface RecordableVersionService
* Gets the version that relates to the version record
*
* @param versionRecord version record node reference
* @return Version version or null if not found
* @return Version version or null if not found
*/
Version getRecordedVersion(NodeRef record);
/**
* Creates a record from the latest version, marking it as recorded.
* Creates a record from the latest frozen version, marking it as recorded.
* <p>
* Does not create a record if the node is not versionable or the latest
* version is already recorded.
* Does not create a record if the node is not versionable or the latest version is already recorded.
*
* @param nodeRef node reference
* @return NodeRef node reference to the created record.
* @param nodeRef node reference
* @return NodeRef node reference to the created record.
*/
NodeRef createRecordFromLatestVersion(NodeRef filePlan, NodeRef nodeRef);
/**
* Creates a record from the latest version, marking it as recorded.
* <p>
* Does not create a record if the node is not versionable or the latest version is already recorded.
*
* @param filePlan file plan node reference
* @param nodeRef node reference
* @param autoVersion true to create a new record version from the latest document state, false to create the record from the latest frozen version
* @return NodeRef node reference to the created record.
*
*/
NodeRef createRecordFromLatestVersion(NodeRef filePlan, NodeRef nodeRef, boolean autoVersion);
/**
* Indicates whether a record version is destroyed or not.
*
* @param version version
* @return boolean true if destroyed, false otherwise
* @param version version
* @return boolean true if destroyed, false otherwise
*/
boolean isRecordedVersionDestroyed(Version version);
/**
* Marks a recorded version as destroyed.
* <p>
* Note this method does not destroy the associated record, instead it marks the
* version as destroyed.
* Note this method does not destroy the associated record; instead it marks the version as destroyed.
*
* @param version version
* @param version version
*/
void destroyRecordedVersion(Version version);
/**
* Flag that indicates whether a new version should be created on record creation if the current node has been modified
*
* @return boolean true to auto-version on record creation, false to use the latest existing version
*/
boolean isEnableAutoVersionOnRecordCreation();
/**
* Create a snapshot version of the current node
*
* @param nodeRef node reference
*/
void createSnapshotVersion(NodeRef nodeRef);
}
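
For illustration, a minimal sketch of how a caller might use the new overload together with the configuration flag; the class name and injected field are assumptions, and only the RecordableVersionService methods come from this interface:

import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService;
import org.alfresco.service.cmr.repository.NodeRef;

public class RecordFromVersionExample
{
    private RecordableVersionService recordableVersionService;

    /** Create a version record for the given document in the given file plan. */
    public NodeRef declareVersionRecord(NodeRef filePlan, NodeRef document)
    {
        boolean autoVersion = recordableVersionService.isEnableAutoVersionOnRecordCreation();
        // when autoVersion is true and the document has unversioned changes, a snapshot
        // version is created first so the version record matches the current document state
        return recordableVersionService.createRecordFromLatestVersion(filePlan, document, autoVersion);
    }
}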

View File

@@ -24,6 +24,7 @@ import static org.codehaus.plexus.util.StringUtils.isNotBlank;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@@ -57,6 +58,7 @@ import org.alfresco.util.PropertyMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Recordable version service implementation
@@ -79,6 +81,12 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
protected static final String PROP_VERSION_RECORD = "RecordVersion";
protected static final String PROP_RECORDED_VERSION_DESTROYED = "RecordedVersionDestroyed";
/** I18N */
private static final String AUTO_VERSION_ON_RECORD_CREATION = "rm.service.enable-autoversion-on-record-creation";
/** flag that enables auto-version on record creation */
private boolean isEnableAutoVersionOnRecordCreation = false;
/** version aspect property names */
private static final String[] VERSION_PROPERTY_NAMES = new String[]
{
@@ -121,7 +129,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
private ExtendedSecurityService extendedSecurityService;
/**
* @param filePlanService file plan service
* @param filePlanService file plan service
*/
public void setFilePlanService(FilePlanService filePlanService)
{
@@ -129,7 +137,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @param authenticationUtil authentication util helper
* @param authenticationUtil authentication util helper
*/
public void setAuthenticationUtil(AuthenticationUtil authenticationUtil)
{
@@ -137,7 +145,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @param relationshipService relationship service
* @param relationshipService relationship service
*/
public void setRelationshipService(RelationshipService relationshipService)
{
@@ -153,7 +161,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @param modelSecurityService model security service
* @param modelSecurityService model security service
*/
public void setModelSecurityService(ModelSecurityService modelSecurityService)
{
@@ -177,7 +185,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @param extendedSecurityService extended security service
* @param extendedSecurityService extended security service
*/
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService)
{
@@ -185,7 +193,21 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @see org.alfresco.repo.version.Version2ServiceImpl#createVersion(org.alfresco.service.cmr.repository.NodeRef, java.util.Map, int)
* @param isEnableAutoVersionOnRecordCreation true to enable auto-version on record creation
*/
public void setEnableAutoVersionOnRecordCreation(boolean isEnableAutoVersionOnRecordCreation)
{
this.isEnableAutoVersionOnRecordCreation = isEnableAutoVersionOnRecordCreation;
}
public boolean isEnableAutoVersionOnRecordCreation()
{
return isEnableAutoVersionOnRecordCreation;
}
/**
* @see org.alfresco.repo.version.Version2ServiceImpl#createVersion(org.alfresco.service.cmr.repository.NodeRef,
* java.util.Map, int)
*/
@Override
protected Version createVersion(NodeRef nodeRef, Map<String, Serializable> origVersionProperties, int versionNumber) throws ReservedVersionNameException
@@ -205,7 +227,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
VersionType versionType = null;
if (origVersionProperties != null)
{
versionType = (VersionType)origVersionProperties.get(VersionModel.PROP_VERSION_TYPE);
versionType = (VersionType) origVersionProperties.get(VersionModel.PROP_VERSION_TYPE);
}
// determine whether this is a recorded version or not
@@ -232,12 +254,12 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @param nodeRef node reference
* @return {@link NodeRef} associated file plan, default if none
* @param nodeRef node reference
* @return {@link NodeRef} associated file plan, default if none
*/
private NodeRef getFilePlan(NodeRef nodeRef)
{
NodeRef filePlan = (NodeRef)nodeService.getProperty(nodeRef, PROP_FILE_PLAN);
NodeRef filePlan = (NodeRef) nodeService.getProperty(nodeRef, PROP_FILE_PLAN);
if (filePlan == null)
{
filePlan = getFilePlan();
@@ -246,7 +268,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @return {@link NodeRef} default file plan, exception if none
* @return {@link NodeRef} default file plan, exception if none
*/
private NodeRef getFilePlan()
{
@@ -276,7 +298,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
boolean result = false;
if (nodeService.hasAspect(nodeRef, RecordableVersionModel.ASPECT_VERSIONABLE))
{
String policyString = (String)nodeService.getProperty(nodeRef, PROP_RECORDABLE_VERSION_POLICY);
String policyString = (String) nodeService.getProperty(nodeRef, PROP_RECORDABLE_VERSION_POLICY);
if (policyString != null)
{
RecordableVersionPolicy policy = RecordableVersionPolicy.valueOf(policyString.toUpperCase());
@@ -306,7 +328,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
NodeRef version = null;
if (versionProperties.containsKey(KEY_RECORDABLE_VERSION) &&
((Boolean)versionProperties.get(KEY_RECORDABLE_VERSION)).booleanValue())
((Boolean)versionProperties.get(KEY_RECORDABLE_VERSION)).booleanValue())
{
// create a recorded version
version = createNewRecordedVersion(sourceTypeRef, versionHistoryRef, standardVersionProperties, versionProperties, versionNumber, nodeDetails);
@@ -323,13 +345,13 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Creates a new recorded version
*
* @param sourceTypeRef source type name
* @param versionHistoryRef version history reference
* @param standardVersionProperties standard version properties
* @param versionProperties version properties
* @param versionNumber version number
* @param nodeDetails policy scope
* @return {@link NodeRef} record version
* @param sourceTypeRef source type name
* @param versionHistoryRef version history reference
* @param standardVersionProperties standard version properties
* @param versionProperties version properties
* @param versionNumber version number
* @param nodeDetails policy scope
* @return {@link NodeRef} record version
*/
protected NodeRef createNewRecordedVersion(QName sourceTypeRef,
NodeRef versionHistoryRef,
@@ -356,14 +378,14 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
try
{
// get the destination file plan
final NodeRef filePlan = (NodeRef)versionProperties.get(KEY_FILE_PLAN);
final NodeRef filePlan = (NodeRef) versionProperties.get(KEY_FILE_PLAN);
if (filePlan == null)
{
throw new AlfrescoRuntimeException("Can't create a new recorded version, because no file plan has been specified in the version properties.");
}
// create a copy of the source node and place in the file plan
final NodeRef nodeRef = (NodeRef)standardVersionProperties.get(Version2Model.PROP_QNAME_FROZEN_NODE_REF);
final NodeRef nodeRef = (NodeRef) standardVersionProperties.get(Version2Model.PROP_QNAME_FROZEN_NODE_REF);
cmObjectType.disableCopy();
try
@@ -400,7 +422,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
nodeService.addAspect(versionNodeRef, Version2Model.ASPECT_VERSION, standardVersionProperties);
// add the recordedVersion aspect with link to record
nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable)record));
nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable) record));
// freeze auditable aspect information
freezeAuditableAspect(nodeRef, versionNodeRef);
@@ -441,8 +463,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Helper method to link the record to the previous version record
*
* @param nodeRef noderef source node reference
* @param record record record node reference
* @param nodeRef noderef source node reference
* @param record record record node reference
*/
private void linkToPreviousVersionRecord(final NodeRef nodeRef, final NodeRef record)
{
@@ -465,8 +487,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Helper to get the latest version record for a given document (ie non-record)
*
* @param nodeRef node reference
* @return NodeRef latest version record, null otherwise
* @param nodeRef node reference
* @return NodeRef latest version record, null otherwise
*/
private NodeRef getLatestVersionRecord(NodeRef nodeRef)
{
@@ -480,7 +502,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
for (Version previousVersion : previousVersions)
{
// look for the associated record
final NodeRef previousRecord = (NodeRef)previousVersion.getVersionProperties().get(PROP_VERSION_RECORD);
final NodeRef previousRecord = (NodeRef) previousVersion.getVersionProperties().get(PROP_VERSION_RECORD);
if (previousRecord != null &&
nodeService.exists(previousRecord))
{
@@ -545,17 +567,18 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
@Override
protected Version getVersion(NodeRef versionRef)
{
Version version = super.getVersion(versionRef);
// place the version record reference in the version properties
NodeRef record = (NodeRef)dbNodeService.getProperty(versionRef, PROP_RECORD_NODE_REF);
NodeRef record = (NodeRef) dbNodeService.getProperty(versionRef, PROP_RECORD_NODE_REF);
if (record != null)
{
version.getVersionProperties().put(PROP_VERSION_RECORD, record);
}
// place information about the destruction of the version record in the properties
Boolean destroyed = (Boolean)dbNodeService.getProperty(versionRef, PROP_DESTROYED);
Boolean destroyed = (Boolean) dbNodeService.getProperty(versionRef, PROP_DESTROYED);
if (destroyed == null) { destroyed = Boolean.FALSE; }
version.getVersionProperties().put(PROP_RECORDED_VERSION_DESTROYED, destroyed);
@@ -614,7 +637,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
if (dbNodeService.hasAspect(versionNodeRef, RecordableVersionModel.ASPECT_RECORDED_VERSION))
{
// get the version record
result = (NodeRef)dbNodeService.getProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF);
result = (NodeRef) dbNodeService.getProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF);
// check that the version record exists
if (result != null &&
@@ -631,8 +654,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Create Version Store Ref
*
* @param store ref
* @return store ref for version store
* @param store ref
* @return store ref for version store
*/
public StoreRef convertStoreRef(StoreRef storeRef)
{
@@ -643,14 +666,25 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
* Convert the incoming node ref (with the version store protocol specified)
* to the internal representation with the workspace protocol.
*
* @param nodeRef the incoming version protocol node reference
* @return the internal version node reference
* @param nodeRef the incoming version protocol node reference
* @return the internal version node reference
*/
public NodeRef convertNodeRef(NodeRef nodeRef)
{
return new NodeRef(convertStoreRef(nodeRef.getStoreRef()), nodeRef.getId());
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#createRecordFromLatestVersion(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, boolean)
*/
@Override
public NodeRef createRecordFromLatestVersion(final NodeRef filePlan, final NodeRef nodeRef, final boolean isEnableAutoVersionOnRecordCreation)
{
setEnableAutoVersionOnRecordCreation(isEnableAutoVersionOnRecordCreation);
return createRecordFromLatestVersion(filePlan, nodeRef);
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#createRecordFromLatestVersion(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef)
*/
@@ -665,6 +699,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
// check for versionable aspect
if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE))
{
createSnapshotVersion(nodeRef);
// get the latest version
final Version currentVersion = getCurrentVersion(nodeRef);
@@ -732,7 +767,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
QName sourceTypeRef = getVersionType(currentVersion);
// patch-up owner information, which needs to be frozen for recorded versions
String owner = (String)nodeService.getProperty(currentVersion.getFrozenStateNodeRef(), ContentModel.PROP_OWNER);
String owner = (String) nodeService.getProperty(currentVersion.getFrozenStateNodeRef(), ContentModel.PROP_OWNER);
if (owner != null)
{
versionProperties.put(PROP_FROZEN_OWNER, owner);
@@ -760,7 +795,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
nodeService.addAspect(versionNodeRef, Version2Model.ASPECT_VERSION, versionProperties);
// add the recordedVersion aspect with link to record
nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable)record));
nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable) record));
}
}
@@ -768,7 +803,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isRecordedVersionDestroyed(org.alfresco.service.cmr.version.Version)
* @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isRecordedVersionDestroyed(org.alfresco.service.cmr.version.Version)
*/
@Override
public boolean isRecordedVersionDestroyed(Version version)
@@ -779,7 +814,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
NodeRef versionNodeRef = getVersionNodeRef(version);
// get the destroyed property value
Boolean isDestroyed = (Boolean)dbNodeService.getProperty(versionNodeRef, PROP_DESTROYED);
Boolean isDestroyed = (Boolean) dbNodeService.getProperty(versionNodeRef, PROP_DESTROYED);
if (isDestroyed != null)
{
result = isDestroyed.booleanValue();
@@ -812,8 +847,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
* Helper method to get the version number of a given version by inspecting the
* name of the parent association.
*
* @param version version
* @return int version number
* @param version version
* @return int version number
*/
private int getVersionNumber(Version version)
{
@@ -827,7 +862,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Helper method to get all the version aspect properties from an existing version
*
* @param version version
* @param version version
* @return Map<QName, Serializable> property values
*/
private Map<QName, Serializable> getVersionAspectProperties(Version version)
@@ -862,11 +897,52 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl
/**
* Helper method to get the internal node reference of a version
*
* @param version version
* @return NodeRef internal node reference to version
* @param version version
* @return NodeRef internal node reference to version
*/
private NodeRef getVersionNodeRef(Version version)
{
return convertNodeRef(version.getFrozenStateNodeRef());
}
/**
* Check if the current version of the node has been modified compared with the latest versioned state
*
* @param nodeRef internal node reference
* @return boolean true if nodeRef is modified, otherwise false
*/
public boolean isCurrentVersionDirty(NodeRef nodeRef)
{
if (!nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE)) { return false; }
// get the latest version
Version currentVersion = getCurrentVersion(nodeRef);
Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED);
if (currentVersion == null) { return true; }
// grab the frozen state
NodeRef currentFrozenState = currentVersion.getFrozenStateNodeRef();
Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED);
boolean versionStoreOutdated = ((frozenModificationDate != null) && (modificationDate.getTime() > frozenModificationDate.getTime()));
return versionStoreOutdated;
}
/**
* @see RecordableVersionService#createSnapshotVersion(NodeRef)
*/
public void createSnapshotVersion(NodeRef nodeRef)
{
boolean autoVersion = isEnableAutoVersionOnRecordCreation();
// if the auto-version on record creation flag is set, create a new version for dirty nodes
if (autoVersion && isCurrentVersionDirty(nodeRef))
{
Map<String, Serializable> autoVersionProperties = new HashMap<String, Serializable>(2);
autoVersionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR);
autoVersionProperties.put(VersionModel.PROP_DESCRIPTION, I18NUtil.getMessage(AUTO_VERSION_ON_RECORD_CREATION));
createVersion(nodeRef, autoVersionProperties);
}
}
}

View File

@@ -18,11 +18,21 @@
*/
package org.alfresco.repo.web.scripts.roles;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
@@ -33,19 +43,26 @@ import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.DeclarativeWebScript;
import org.springframework.extensions.webscripts.Format;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
/**
* Webscript used for removing dynamic authorities from the records.
@@ -54,7 +71,7 @@ import org.springframework.extensions.webscripts.WebScriptRequest;
* @since 2.3.0.7
*/
@SuppressWarnings("deprecation")
public class DynamicAuthoritiesGet extends DeclarativeWebScript implements RecordsManagementModel
public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel
{
private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0.";
private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN";
@@ -63,14 +80,16 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN";
private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid.";
private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory";
private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist.";
private static final String SUCCESS_STATUS = "success";
private static final String FAILED_STATUS = "failed";
/**
* The logger
*/
private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class);
private static final String BATCH_SIZE = "batchsize";
private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords";
private static final String PARAM_EXPORT = "export";
private static final String PARAM_PARENT_NODE_REF = "parentNodeRef";
private static final String MODEL_STATUS = "responsestatus";
private static final String MODEL_MESSAGE = "message";
private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records.";
@@ -86,50 +105,60 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
private PermissionService permissionService;
private ExtendedSecurityService extendedSecurityService;
private TransactionService transactionService;
/** Content Streamer */
protected ContentStreamer contentStreamer;
private FileFolderService fileFolderService;
/** service setters */
public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = patchDAO; }
public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; }
public void setQnameDAO(QNameDAO qnameDAO) { this.qnameDAO = qnameDAO; }
public void setNodeService(NodeService nodeService) { this.nodeService = nodeService; }
public void setPermissionService(PermissionService permissionService) { this.permissionService = permissionService; }
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { this.extendedSecurityService = extendedSecurityService; }
public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; }
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache)
public void setNodeDAO(NodeDAO nodeDAO)
{
this.nodeDAO = nodeDAO;
}
public void setQnameDAO(QNameDAO qnameDAO)
{
this.qnameDAO = qnameDAO;
}
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
public void setPermissionService(PermissionService permissionService)
{
this.permissionService = permissionService;
}
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService)
{
this.extendedSecurityService = extendedSecurityService;
}
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
public void setContentStreamer(ContentStreamer contentStreamer)
{
this.contentStreamer = contentStreamer;
}
public void setFileFolderService(FileFolderService fileFolderService)
{
this.fileFolderService = fileFolderService;
}
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException
{
Map<String, Object> model = new HashMap<String, Object>();
String batchSizeStr = req.getParameter(BATCH_SIZE);
String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS);
Long size = 0L;
if (StringUtils.isBlank(batchSizeStr))
{
model.put(MODEL_STATUS, FAILED_STATUS);
model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_MANDATORY);
logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
return model;
}
try
{
size = Long.parseLong(batchSizeStr);
if(size <= 0)
{
model.put(MODEL_STATUS, FAILED_STATUS);
model.put(MODEL_MESSAGE, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
return model;
}
}
catch(NumberFormatException ex)
{
model.put(MODEL_STATUS, FAILED_STATUS);
model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_INVALID);
logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
return model;
}
final Long batchSize = size;
final Long batchSize = getBatchSizeParameter(req);
// get the max node id and the extended security aspect
Long maxNodeId = patchDAO.getMaxAdmNodeID();
final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY);
@@ -141,6 +170,201 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
return model;
}
Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize);
boolean attach = getExportParameter(req);
File file = TempFileProvider.createTempFile("processedNodes_", ".csv");
FileWriter writer = new FileWriter(file);
BufferedWriter out = new BufferedWriter(writer);
List<NodeRef> processedNodes = new ArrayList<NodeRef>();
try
{
NodeRef parentNodeRef = getParentNodeRefParameter(req);
if (parentNodeRef != null)
{
processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair,
totalNumberOfRecordsToProcess.intValue(), out, attach);
}
else
{
processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess,
out, attach);
}
}
finally
{
out.close();
}
int processedNodesSize = processedNodes.size();
String message = "";
if (totalNumberOfRecordsToProcess == 0
|| (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess))
{
message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize);
}
if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize)
{
message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess);
}
model.put(MODEL_STATUS, SUCCESS_STATUS);
model.put(MODEL_MESSAGE, message);
logger.info(message);
if (attach)
{
try
{
String fileName = file.getName();
contentStreamer.streamContent(req, res, file, null, attach, fileName, model);
model = null;
}
finally
{
if (file != null)
{
file.delete();
}
}
}
return model;
}
/**
* Get export parameter from the request
*
* @param req
* @return
*/
protected boolean getExportParameter(WebScriptRequest req)
{
boolean attach = false;
String export = req.getParameter(PARAM_EXPORT);
if (export != null && Boolean.parseBoolean(export))
{
attach = true;
}
return attach;
}
/*
* (non-Javadoc)
* @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts.
* WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse)
*/
@Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException
{
// retrieve requested format
String format = req.getFormat();
try
{
String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format);
if (mimetype == null)
{
throw new WebScriptException("Web Script format '" + format + "' is not registered");
}
// construct model for script / template
Status status = new Status();
Cache cache = new Cache(getDescription().getRequiredCache());
Map<String, Object> model = buildModel(req, res);
if (model == null) { return; }
model.put("status", status);
model.put("cache", cache);
Map<String, Object> templateModel = createTemplateParameters(req, res, model);
// render output
int statusCode = status.getCode();
if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus())
{
if (logger.isDebugEnabled())
{
logger.debug("Force success status header in response: " + req.forceSuccessStatus());
logger.debug("Setting status " + statusCode);
}
res.setStatus(statusCode);
}
// apply location
String location = status.getLocation();
if (location != null && location.length() > 0)
{
if (logger.isDebugEnabled()) logger.debug("Setting location to " + location);
res.setHeader(WebScriptResponse.HEADER_LOCATION, location);
}
// apply cache
res.setCache(cache);
String callback = null;
if (getContainer().allowCallbacks())
{
callback = req.getJSONCallback();
}
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{
if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type="
+ Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback);
// NOTE: special case for wrapping JSON results in a javascript function callback
res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8");
res.getWriter().write((callback + "("));
}
else
{
if (logger.isDebugEnabled())
logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode);
res.setContentType(mimetype + ";charset=UTF-8");
}
// render response according to requested format
renderFormatTemplate(format, templateModel, res.getWriter());
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{
// NOTE: special case for wrapping JSON results in a javascript function callback
res.getWriter().write(")");
}
}
catch (Throwable e)
{
if (logger.isDebugEnabled())
{
StringWriter stack = new StringWriter();
e.printStackTrace(new PrintWriter(stack));
logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString());
}
throw createStatusException(e, req, res);
}
}
protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer)
{
format = (format == null) ? "" : format;
String templatePath = getDescription().getId() + "." + format;
if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'");
renderTemplate(templatePath, model, writer);
}
/**
* Obtain the maximum number of records to be processed from the request if it is specified, or the batchsize value otherwise
*
* @param req
* @return the maximum number of records to be processed from the request if it is specified, or the batchsize value otherwise
*/
protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize)
{
String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS);
//default total number of records to be processed to batch size value
Long totalNumberOfRecordsToProcess = batchSize;
if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr))
@@ -154,7 +378,77 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
//do nothing here, the value will remain 0L in this case
}
}
return totalNumberOfRecordsToProcess;
}
/**
* Obtain batchsize parameter from the request.
*
* @param req
* @return batchsize parameter from the request
*/
protected Long getBatchSizeParameter(WebScriptRequest req)
{
String batchSizeStr = req.getParameter(BATCH_SIZE);
Long size = 0L;
if (StringUtils.isBlank(batchSizeStr))
{
logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
}
try
{
size = Long.parseLong(batchSizeStr);
if (size <= 0)
{
logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
}
catch (NumberFormatException ex)
{
logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
}
return size;
}
/**
* Get the parentNodeRef parameter from the request
*
* @param req
* @return the parent node reference, or null if the parameter is not specified
*/
protected NodeRef getParentNodeRefParameter(WebScriptRequest req)
{
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF);
NodeRef parentNodeRef = null;
if (StringUtils.isNotBlank(parentNodeRefStr))
{
parentNodeRef = new NodeRef(parentNodeRefStr);
if(!nodeService.exists(parentNodeRef))
{
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString());
logger.info(message);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message);
}
}
return parentNodeRef;
}
/**
* Process all nodes, or at most the number of nodes specified by the batchsize or totalNumberOfRecordsToProcess
* parameters
*
* @param batchSize
* @param maxNodeId
* @param recordAspectPair
* @param totalNumberOfRecordsToProcess
* @return the list of processed nodes
*/
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair,
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach)
{
final Long maxRecordsToProcess = totalNumberOfRecordsToProcess;
final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
logger.info(MESSAGE_PROCESSING_BEGIN);
@@ -172,7 +466,8 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
public Void execute() throws Throwable
{
// get the nodes with the extended security aspect applied
List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex,
currentIndex + batchSize);
// process each one
for (Long nodeId : nodeIds)
@@ -187,29 +482,77 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record);
if (attach)
{
out.write(recordName);
out.write(",");
out.write(record.toString());
out.write("\n");
}
}
return null;
}
}, false, // read only
true); // requires new
}
logger.info(MESSAGE_PROCESSING_END);
return processedNodes;
}
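The loop above follows a common Alfresco batching idiom: each window of node ids is processed in its own read-write, requires-new retrying transaction so that a failure only rolls back one batch. The sketch below isolates that idiom; it is an editor's illustration (the BatchInTransactionSketch class and the processOne callback are hypothetical, not part of this change).

import java.util.List;
import java.util.function.Consumer;

import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;

// Editor's sketch only: the per-batch transaction pattern used by processNodes and
// processChildrenNodes, with the record-specific work abstracted behind a callback.
public class BatchInTransactionSketch
{
    private final TransactionService transactionService;

    public BatchInTransactionSketch(TransactionService transactionService)
    {
        this.transactionService = transactionService;
    }

    public <T> void processInBatches(List<T> items, int batchSize, Consumer<T> processOne)
    {
        for (int index = 0; index < items.size(); index += batchSize)
        {
            final List<T> batch = items.subList(index, Math.min(index + batchSize, items.size()));
            transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
            {
                public Void execute() throws Throwable
                {
                    // each batch is committed (or retried) independently of the others
                    for (T item : batch)
                    {
                        processOne.accept(item);
                    }
                    return null;
                }
            },
            false, // read-write
            true); // requires a new transaction per batch
        }
    }
}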
protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
final boolean attach)
{
final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
logger.info(MESSAGE_PROCESSING_BEGIN);
// by batch size
for (int i = 0; i < children.size(); i += batchSize)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
final int currentIndex = i;
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
// process each one
for (FileInfo node : nodes)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
NodeRef record = node.getNodeRef();
if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
{
String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record);
if (attach)
{
out.write(recordName);
out.write(",");
out.write(record.toString());
out.write("\n");
}
}
}
return null;
}
}, false, // read only
true); // requires new
}
logger.info(MESSAGE_PROCESSING_END);
return processedNodes;
}
/**
@@ -218,7 +561,7 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
* @param nodeRef
*/
@SuppressWarnings({ "unchecked"})
protected void processNode(NodeRef nodeRef)
{
// get the reader/writer data
Map<String, Integer> readers = (Map<String, Integer>)nodeService.getProperty(nodeRef, PROP_READERS);
@@ -234,8 +577,18 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor
// if record then ...
if (nodeService.hasAspect(nodeRef, ASPECT_RECORD))
{
Set<String> readersKeySet = null;
if (readers != null)
{
readersKeySet = readers.keySet();
}
Set<String> writersKeySet = null;
if (writers != null)
{
writersKeySet = writers.keySet();
}
// re-set extended security via API
extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet);
}
}
}
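For reference, this web script is driven entirely by request parameters: batchsize is mandatory, while maxProcessedRecords, parentNodeRef and export are optional (the names match those exercised in the unit tests further down). A minimal invocation sketch follows; the endpoint URL and credentials are placeholders, since the web script descriptor is not part of this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Editor's sketch only: calling the migration web script with the parameters validated above.
// The URL path and the admin:admin credentials are assumptions for illustration.
public class DynamicAuthoritiesMigrationClient
{
    public static void main(String[] args) throws Exception
    {
        String credentials = Base64.getEncoder().encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));
        URI uri = URI.create("http://localhost:8080/alfresco/service/api/rm/rm-dynamicauthorities"
                + "?batchsize=500&maxProcessedRecords=2000&export=true");

        HttpRequest request = HttpRequest.newBuilder(uri)
                .header("Authorization", "Basic " + credentials)
                .GET()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // A JSON body such as {"responsestatus":"success","message":"Processed N records."} is expected,
        // or a CSV attachment when export=true.
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}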

View File

@@ -31,7 +31,8 @@ import org.junit.runners.Suite.SuiteClasses;
@RunWith(Suite.class)
@SuiteClasses(
{
CutOffTest.class,
UpdateDispositionScheduleTest.class
})
public class DispositionTestSuite
{

View File

@@ -0,0 +1,164 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition;
import static org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest.test;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.google.common.collect.ImmutableMap;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.module.org_alfresco_module_rm.job.publish.DispositionActionDefinitionPublishExecutor;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils;
import org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.extensions.webscripts.GUID;
/**
* Integration tests for updating the disposition schedule.
*
* @author Tom Page
* @since 2.3.1
*/
public class UpdateDispositionScheduleTest extends BaseRMTestCase
{
/** A unique prefix for the constants in this test. */
protected static final String TEST_PREFIX = UpdateDispositionScheduleTest.class.getName() + GUID.generate() + "_";
/** The name to use for the category. */
protected static final String CATEGORY_NAME = TEST_PREFIX + "Category";
/** The name to use for the folder. */
protected static final String FOLDER_NAME = TEST_PREFIX + "Folder";
/** The name to use for the record. */
protected static final String RECORD_NAME = TEST_PREFIX + "Record";
/** The executor for the disposition update job. */
private DispositionActionDefinitionPublishExecutor dispositionActionDefinitionPublishExecutor;
/** The internal disposition service is used to avoid permissions issues when updating the record. */
private DispositionService internalDispositionService;
/** The category node. */
private NodeRef category;
/** The folder node. */
private NodeRef folder;
/** The record node. */
private NodeRef record;
/** The 'disposition as of' date from before the 'when' step. */
private Date originalAsOfDate;
@Override
protected void setUp() throws Exception
{
super.setUp();
BehaviourTest.initBehaviourTests(retryingTransactionHelper);
// Get the application context
applicationContext = ApplicationContextHelper.getApplicationContext(getConfigLocations());
dispositionActionDefinitionPublishExecutor = applicationContext.getBean(DispositionActionDefinitionPublishExecutor.class);
internalDispositionService = (DispositionService) applicationContext.getBean("dispositionService");
}
/**
* <a href="https://issues.alfresco.com/jira/browse/RM-3386">RM-3386</a>
* <p><pre>
* Given a record subject to a disposition schedule
* And the next step is due to run at some period after the date the content was created
* When I update the period of the next step (and wait for this to be processed)
* Then the "as of" date is updated to be at the new period after the creation date.
* </pre>
*/
public void testUpdatePeriod()
{
test()
.given(() -> {
// Create a category.
category = filePlanService.createRecordCategory(filePlan, CATEGORY_NAME);
// Create a disposition schedule for the category (Cut off immediately, then Destroy 1 year after the creation date).
DispositionSchedule dispSched = utils.createBasicDispositionSchedule(category, "instructions", "authority", true, false);
Map<QName, Serializable> cutOffParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME,
PROP_DISPOSITION_DESCRIPTION, "description",
PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY);
dispositionService.addDispositionActionDefinition(dispSched, cutOffParams);
Map<QName, Serializable> destroyParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME,
PROP_DISPOSITION_DESCRIPTION, "description",
PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_ONE_YEAR,
PROP_DISPOSITION_PERIOD_PROPERTY, ContentModel.PROP_CREATED);
dispositionService.addDispositionActionDefinition(dispSched, destroyParams);
// Create a folder containing a record within the category.
folder = recordFolderService.createRecordFolder(category, FOLDER_NAME);
record = fileFolderService.create(folder, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef();
dispositionService.cutoffDisposableItem(record);
// Ensure the update has been applied to the record.
internalDispositionService.updateNextDispositionAction(record);
originalAsOfDate = dispositionService.getNextDispositionAction(record).getAsOfDate();
})
.when(() -> {
// Update the Destroy step to be 3 years after the creation date.
DispositionSchedule dispSched = dispositionService.getDispositionSchedule(category);
DispositionActionDefinition destroy = dispSched.getDispositionActionDefinitionByName(DestroyAction.NAME);
Map<QName, Serializable> destroyParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME,
PROP_DISPOSITION_DESCRIPTION, "description",
PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_THREE_YEARS,
PROP_DISPOSITION_PERIOD_PROPERTY, ContentModel.PROP_CREATED);
dispositionService.updateDispositionActionDefinition(destroy, destroyParams);
// Make the disposition action definition update job run.
dispositionActionDefinitionPublishExecutor.publish(destroy.getNodeRef());
})
.then()
.expect(true)
.from(() -> aboutTwoYearsApart(originalAsOfDate, dispositionService.getNextDispositionAction(record).getAsOfDate()))
.because("Increasing the destroy period by two years should increase the 'as of' date by two years.");
}
/**
* Check that the two given dates are approximately two years apart.
* <p>
* This actually just checks that they're more than one and less than three years apart, because leap years make
* things hard to calculate.
*
* @return true if the two dates are about two years apart.
*/
private boolean aboutTwoYearsApart(Date start, Date end)
{
long days = daysBetween(start, end);
long yearInDays = 365;
return (yearInDays < days) && (days < 3 * yearInDays);
}
/** Find the number of days between the two dates. */
private long daysBetween(Date start, Date end)
{
return TimeUnit.MILLISECONDS.toDays(end.getTime() - start.getTime());
}
}

View File

@@ -50,7 +50,8 @@ import org.junit.runners.Suite.SuiteClasses;
RM1914Test.class,
//RM2190Test.class,
RM2192Test.class,
RM3314Test.class,
RM4101Test.class
})
public class IssueTestSuite
{

View File

@@ -0,0 +1,121 @@
/*
* Copyright (C) 2005-2016 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.test.integration.issue;
import java.util.UUID;
import org.alfresco.module.org_alfresco_module_rm.action.impl.LinkToAction;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.rule.Rule;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.rule.RuleType;
/**
* Tests issue #4101: Link to, Copy to and File to rules fail when not run in background
*
* @author Tuna Aksoy
* @since 2.3.0.8
*/
public class RM4101Test extends BaseRMTestCase
{
private RuleService ruleService;
@Override
protected void initServices()
{
super.initServices();
ruleService = (RuleService) applicationContext.getBean("RuleService");
}
@Override
protected boolean isRecordTest()
{
return true;
}
public void testRunRuleNotInBackground() throws Exception
{
final String categoryName = "category1" + UUID.randomUUID().toString();
final NodeRef category1 = doTestInTransaction(new Test<NodeRef>()
{
@Override
public NodeRef run()
{
return filePlanService.createRecordCategory(filePlan, categoryName);
}
});
final NodeRef folder1 = doTestInTransaction(new Test<NodeRef>()
{
@Override
public NodeRef run()
{
return recordFolderService.createRecordFolder(category1, "folder1WithRule" + UUID.randomUUID().toString());
}
});
final String folder2Name = "folder2FolderToLinkTo" + UUID.randomUUID().toString();
final NodeRef folder2 = doTestInTransaction(new Test<NodeRef>()
{
@Override
public NodeRef run()
{
return recordFolderService.createRecordFolder(category1, folder2Name);
}
});
doTestInTransaction(new Test<Void>()
{
@Override
public Void run()
{
Action linkToAction = actionService.createAction(LinkToAction.NAME);
linkToAction.setParameterValue(LinkToAction.PARAM_PATH, "/" + categoryName + "/" + folder2Name);
Rule rule = new Rule();
rule.setRuleType(RuleType.INBOUND);
rule.setTitle("LinkTo");
rule.setAction(linkToAction);
rule.setExecuteAsynchronously(false);
ruleService.saveRule(folder1, rule);
return null;
}
});
doTestInTransaction(new Test<Void>()
{
@Override
public Void run()
{
utils.createRecord(folder1, "record1" + UUID.randomUUID().toString());
return null;
}
@Override
public void test(Void result) throws Exception
{
assertEquals(1, nodeService.getChildAssocs(folder2).size());
}
});
}
}

View File

@@ -500,8 +500,7 @@ public class InplaceRecordPermissionTest extends BaseRMTestCase
* And its metadata is maintained
* Then the inplace users will no longer see the record
*/
public void testDestroyedRecordInplacePermissions()
{
test()
.given()

View File

@@ -38,7 +38,9 @@ import org.junit.runners.Suite.SuiteClasses;
HideInplaceRecordTest.class,
MoveInplaceRecordTest.class,
ViewRecordTest.class,
LinkRecordTest.class,
CreateInplaceRecordTest.class,
InplaceRecordPermissionTest.class
})
public class RecordTestSuite
{

View File

@@ -19,15 +19,18 @@
package org.alfresco.module.org_alfresco_module_rm.test.integration.version;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService;
import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionServiceImpl;
import org.alfresco.repo.version.VersionModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.service.cmr.version.VersionType;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.GUID;
/**
@@ -48,7 +51,7 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest
protected void initServices()
{
super.initServices();
recordableVersionService = (RecordableVersionService) applicationContext.getBean("RecordableVersionService");
}
/**
@@ -205,4 +208,184 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest
}
/**
* Given versionable content with a recorded latest version and autoversion is true
* When I declare this version as a record and the document contains local modifications
* Then a new minor version is created for the document
*
* @see https://issues.alfresco.com/jira/browse/RM-2368
*/
public void testCreateRecordFromLatestVersionAutoTrue()
{
doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator)
{
private NodeRef myDocument;
private NodeRef versionedRecord;
private Map<String, Serializable> versionProperties;
private Date createdDate;
private Date modificationDate;
private String record_name = "initial_name";
private String AUTO_VERSION_DESCRIPTION = "Auto Version on Record Creation";
private boolean autoVersion = true;
public void given() throws Exception
{
// create a document
myDocument = fileFolderService.create(dmFolder, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
createdDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_CREATED);
modificationDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_MODIFIED);
assertTrue("Modified date must be after or on creation date", createdDate.getTime() == modificationDate.getTime());
// Set initial set of properties
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(3);
// Ensure default behaviour autoversion on change properties is set to false
properties.put(ContentModel.PROP_AUTO_VERSION_PROPS, false);
// Set initial name
properties.put(ContentModel.PROP_NAME, "initial_name");
nodeService.setProperties(myDocument, properties);
nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION);
nodeService.addAspect(myDocument, ContentModel.ASPECT_OWNABLE, null);
// make sure document is versionable
nodeService.addAspect(myDocument, ContentModel.ASPECT_VERSIONABLE, null);
// Change Type to a custom document
nodeService.setType(myDocument, TYPE_CUSTOM_TYPE);
// setup version properties
versionProperties = new HashMap<String, Serializable>(2);
versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION);
versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR);
// create initial version
versionService.createVersion(myDocument, versionProperties);
}
public void when()
{
// Apply a custom aspect
nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null);
// Update properties
nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "updated_name");
nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION);
// test RM-2368
versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument, autoVersion);
}
public void then()
{
// Properties updated / flag as modified
// check the created record
assertNotNull(versionedRecord);
assertTrue(recordService.isRecord(versionedRecord));
// check the record type is correct
assertEquals(TYPE_CUSTOM_TYPE, nodeService.getType(versionedRecord));
// assert the current version is recorded
assertTrue(recordableVersionService.isCurrentVersionRecorded(myDocument));
// get name of record
record_name = (String) nodeService.getProperty(versionedRecord, ContentModel.PROP_NAME);
// a new version is created because the current node was modified
assertTrue("Name was updated:", record_name.contains("updated_name"));
// check record
checkRecordedVersion(myDocument, AUTO_VERSION_DESCRIPTION, "1.1");
}
});
}
/**
*
* Given versionable content with a recorded latest version and autoversion is false
* When I declare this version as a record and the document contains local modifications
* Then a record is created from the latest version
*
* @see https://issues.alfresco.com/jira/browse/RM-2368
*/
public void testCreateRecordFromLatestVersion()
{
doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator)
{
private NodeRef myDocument;
private NodeRef versionedRecord;
private Map<String, Serializable> versionProperties;
private Date createdDate;
private Date modificationDate;
private String record_name = "initial_name";
private boolean autoVersion = false;
public void given() throws Exception
{
// create a document
myDocument = fileFolderService.create(dmFolder, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef();
createdDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_CREATED);
modificationDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_MODIFIED);
assertTrue("Modified date must be after or on creation date", createdDate.getTime() == modificationDate.getTime());
// Set initial set of properties
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(3);
// Ensure default behaviour autoversion on change properties is set to false
properties.put(ContentModel.PROP_AUTO_VERSION_PROPS, false);
// Set initial name
properties.put(ContentModel.PROP_NAME, "initial_name");
nodeService.setProperties(myDocument, properties);
nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION);
nodeService.addAspect(myDocument, ContentModel.ASPECT_OWNABLE, null);
// make sure document is versionable
nodeService.addAspect(myDocument, ContentModel.ASPECT_VERSIONABLE, null);
// Change Type to a custom document
nodeService.setType(myDocument, TYPE_CUSTOM_TYPE);
// setup version properties
versionProperties = new HashMap<String, Serializable>(2);
versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION);
versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR);
// create initial version
versionService.createVersion(myDocument, versionProperties);
}
public void when()
{
// Apply a custom aspect
nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null);
// Update properties
nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "initial_name");
nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION);
// test RM-2368
versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument, autoVersion);
}
public void then()
{
// Properties updated / flag as modified
// check the created record
assertNotNull(versionedRecord);
assertTrue(recordService.isRecord(versionedRecord));
// check the record type is correct
assertEquals(TYPE_CUSTOM_TYPE, nodeService.getType(versionedRecord));
// assert the current version is recorded
assertTrue(recordableVersionService.isCurrentVersionRecorded(myDocument));
// get name of record
record_name = (String) nodeService.getProperty(versionedRecord, ContentModel.PROP_NAME);
// the record is created from the existing frozen version, which does not contain the node's modifications
assertTrue("Name is not modified: ", record_name.contains("initial_name"));
checkRecordedVersion(myDocument, DESCRIPTION, "1.0");
}
});
}
}

View File

@@ -77,6 +77,8 @@ public class CommonRMTestUtils implements RecordsManagementModel
public static final String PERIOD_NONE = "none|0";
public static final String PERIOD_IMMEDIATELY = "immediately|0";
public static final String PERIOD_ONE_WEEK = "week|1";
public static final String PERIOD_ONE_YEAR = "year|1";
public static final String PERIOD_THREE_YEARS = "year|3";
/**
* Constructor
@@ -256,7 +258,7 @@ public class CommonRMTestUtils implements RecordsManagementModel
}, AuthenticationUtil.getAdminUserName());
}
}
public void closeFolder(final NodeRef recordFolder)
{

View File

@@ -0,0 +1,94 @@
/*
* Copyright (C) 2005-2016 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.action.impl;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.PROP_DISPOSITION_AS_OF;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link BroadcastDispositionActionDefinitionUpdateAction}.
*
* @author Tom Page
* @since 2.3.1
*/
public class BroadcastDispositionActionDefinitionUpdateActionUnitTest
{
/** The node under the category containing information about the definition of the action. */
private static final NodeRef DISPOSITION_ACTION_DEF_NODE = new NodeRef("disposition://Action/Def");
/** The node containing the details of the next disposition step for the content. */
private static final NodeRef NEXT_ACTION_NODE_REF = new NodeRef("next://Step/");
/** The node being subject to the disposition step. */
private static final NodeRef CONTENT_NODE_REF = new NodeRef("content://Node/Ref");
/** The class under test. */
private BroadcastDispositionActionDefinitionUpdateAction action = new BroadcastDispositionActionDefinitionUpdateAction();
private NodeService mockNodeService = mock(NodeService.class);
private DispositionService mockDispositionService = mock(DispositionService.class);
/** Inject the mock services into the class under test and link the content and next action nodes. */
@Before
public void setUp()
{
action.setNodeService(mockNodeService);
action.setDispositionService(mockDispositionService);
ChildAssociationRef mockAssocRef = mock(ChildAssociationRef.class);
when(mockNodeService.getPrimaryParent(NEXT_ACTION_NODE_REF)).thenReturn(mockAssocRef);
when(mockAssocRef.getParentRef()).thenReturn(CONTENT_NODE_REF);
}
/**
* Check that the disposition service is used to determine the "disposition as of" date when changes are made to the
* disposition period.
*/
@Test
public void testPersistPeriodChanges()
{
// Set up the data associated with the next disposition action.
DispositionAction mockAction = mock(DispositionAction.class);
when(mockAction.getNodeRef()).thenReturn(NEXT_ACTION_NODE_REF);
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
when(mockAction.getDispositionActionDefinition()).thenReturn(mockDispositionActionDefinition);
when(mockAction.getName()).thenReturn("mockAction");
// Set up the disposition service to return a known "disposition as of" date.
Date asOfDate = new Date();
when(mockDispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, false))
.thenReturn(asOfDate);
// Call the method under test.
action.persistPeriodChanges(DISPOSITION_ACTION_DEF_NODE, mockAction);
// Check that the "disposition as of" date has been set on the next action.
verify(mockNodeService).setProperty(NEXT_ACTION_NODE_REF, PROP_DISPOSITION_AS_OF, asOfDate);
}
}
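To make the expectation concrete, the behaviour pinned down by testPersistPeriodChanges amounts to roughly the sketch below; this is an editor's assumption about the shape of the production logic (the class name and method are hypothetical), not the actual BroadcastDispositionActionDefinitionUpdateAction source.

import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.PROP_DISPOSITION_AS_OF;

import java.util.Date;

import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

// Editor's sketch only: when a step's period changes, recalculate the "as of" date of the next
// disposition action (whose primary parent is the disposable content) and persist it.
public class AsOfDateRefreshSketch
{
    private NodeService nodeService;
    private DispositionService dispositionService;

    public void refreshAsOfDate(DispositionAction nextAction)
    {
        NodeRef nextActionNode = nextAction.getNodeRef();
        NodeRef contentNode = nodeService.getPrimaryParent(nextActionNode).getParentRef();
        Date asOf = dispositionService.calculateAsOfDate(contentNode,
                nextAction.getDispositionActionDefinition(), false);
        nodeService.setProperty(nextActionNode, PROP_DISPOSITION_AS_OF, asOf);
    }

    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public void setDispositionService(DispositionService dispositionService)
    {
        this.dispositionService = dispositionService;
    }
}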

View File

@@ -0,0 +1,93 @@
/*
* Copyright (C) 2005-2016 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.disposition;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Date;
import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Period;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link DispositionServiceImpl}.
*
* @author Tom Page
* @since 2.3.1
*/
public class DispositionServiceImplUnitTest
{
/** The node being subject to the disposition step. */
NodeRef CONTENT_NODE_REF = new NodeRef("content://node/");
/** The class under test. */
private DispositionServiceImpl dispositionService = new DispositionServiceImpl();
private NodeService mockNodeService = mock(NodeService.class);
@Before
public void setUp()
{
dispositionService.setNodeService(mockNodeService);
}
/**
* Check that the relevant information is retrieved from the DispositionActionDefinition in order to determine the
* "disposition as of" date.
*/
@Test
public void testCalculateAsOfDate()
{
// Set up a mock for the disposition action definition.
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
Period mockPeriod = mock(Period.class);
when(mockDispositionActionDefinition.getPeriod()).thenReturn(mockPeriod);
when(mockDispositionActionDefinition.getPeriodProperty()).thenReturn(ContentModel.PROP_CREATED);
// Set up a created date and another date that is some Period later.
Date createdDate = new Date(1234567890);
when(mockNodeService.getProperty(CONTENT_NODE_REF, ContentModel.PROP_CREATED)).thenReturn(createdDate);
Date nextDate = new Date(1240000000);
when(mockPeriod.getNextDate(createdDate)).thenReturn(nextDate);
// Call the method under test.
Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true);
assertEquals("Unexpected calculation for 'as of' date", nextDate, asOfDate);
}
/** Check that the calculated "disposition as of" date is null if a null period is given. */
@Test
public void testCalculateAsOfDate_nullPeriod()
{
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
when(mockDispositionActionDefinition.getPeriod()).thenReturn(null);
// Call the method under test.
Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true);
assertNull("It should not be possible to determine the 'as of' date.", asOfDate);
}
}
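The two tests above effectively specify the calculation sketched below; the signature is simplified to two arguments and the class is hypothetical, so treat it as the editor's reading of the contract rather than the actual DispositionServiceImpl implementation.

import java.util.Date;

import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Period;
import org.alfresco.service.namespace.QName;

// Editor's sketch only: derive the "disposition as of" date from the step's period and period property.
public class AsOfDateCalculationSketch
{
    private final NodeService nodeService;

    public AsOfDateCalculationSketch(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition definition)
    {
        Period period = definition.getPeriod();
        if (period == null)
        {
            // No period configured on the step, so no "as of" date can be determined.
            return null;
        }
        // Base the calculation on the configured period property (e.g. cm:created), as in the test.
        QName periodProperty = definition.getPeriodProperty();
        Date baseDate = (Date) nodeService.getProperty(nodeRef, periodProperty);
        return period.getNextDate(baseDate);
    }
}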

View File

@@ -31,8 +31,8 @@ import org.json.JSONObject;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.extensions.surf.util.Content;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Container;
import org.springframework.extensions.webscripts.Description;
import org.springframework.extensions.webscripts.Description.RequiredCache;
import org.springframework.extensions.webscripts.DescriptionExtension;
@@ -68,7 +68,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
/**
* @return the web script under test
*/
protected abstract AbstractWebScript getWebScript();
/**
* @return classpath location of webscript template
@@ -136,7 +136,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
*/
protected String executeWebScript(Map<String, String> parameters, String content) throws Exception
{
AbstractWebScript webScript = getWebScript();
String template = getWebScriptTemplate();
// initialise webscript
@@ -158,7 +158,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
* @return {@link WebScriptRequest} mocked web script request
*/
@SuppressWarnings("rawtypes")
protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map<String, String> parameters, String content) throws Exception
{
Match match = new Match(null, parameters, null, webScript);
org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class);

View File

@@ -23,17 +23,22 @@ import static java.util.Collections.emptyMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -44,6 +49,7 @@ import java.util.stream.Stream;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService;
@@ -55,6 +61,9 @@ import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService;
@@ -64,12 +73,17 @@ import org.alfresco.util.Pair;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
/**
* DynamicAuthoritiesGet Unit Test
@@ -100,13 +114,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
private TransactionService mockedTransactionService;
@Mock
private RetryingTransactionHelper mockedRetryingTransactionHelper;
@Mock
private ContentStreamer contentStreamer;
@Mock
private FileFolderService mockedFileFolderService;
/** test component */
@InjectMocks
private DynamicAuthoritiesGet webScript;
@Override
protected AbstractWebScript getWebScript()
{
return webScript;
}
@@ -128,6 +146,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
webScript.setNodeService(mockedNodeService);
webScript.setPermissionService(mockedPermissionService);
webScript.setExtendedSecurityService(mockedExtendedSecurityService);
webScript.setFileFolderService(mockedFileFolderService);
// setup retrying transaction helper
Answer<Object> doInTransactionAnswer = new Answer<Object>()
{
@@ -153,7 +172,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
}
/**
* Given that there are no nodes with the extended security aspect
* When the action is executed, nothing happens
*
* @throws Exception
*/
@SuppressWarnings({ "unchecked" })
@@ -174,7 +195,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
@@ -186,8 +206,12 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
}
/**
* Given that there are records with the extended security aspect
* When the action is executed
* Then the aspect is removed
* And the dynamic authorities permissions are cleared
* And extended security is set via the updated API
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@@ -196,8 +220,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -208,7 +231,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
// Set up parameters.
@@ -220,7 +242,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
@@ -233,8 +254,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
}
/**
* Given that there are non-records with the extended security aspect
* When the web script is executed
* Then the aspect is removed And the dynamic authorities permissions are cleared
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@@ -243,8 +266,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -255,7 +277,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
// Set up parameters.
@@ -281,39 +302,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
@Test
public void missingBatchSizeParameter() throws Exception
{
try
{
executeJSONWebScript(emptyMap());
fail("Expected exception as parameter batchsize is mandatory.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
public void invalidBatchSizeParameter() throws Exception
{
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "dd");
executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is invalid.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
public void batchSizeShouldBeGraterThanZero() throws Exception
{
when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null);
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "0");
executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is not a number greater than 0.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
@@ -335,8 +367,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -347,7 +378,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
// Set up parameters.
@@ -365,8 +395,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -377,7 +406,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
// Set up parameters.
@@ -389,4 +417,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null);
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
});
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets)
{
assertNull(keySet);
}
for (Set keySet : allWritersKeySets)
{
assertNull(keySet);
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
});
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets)
{
assertNotNull(keySet);
}
for (Set keySet : allWritersKeySets)
{
assertNull(keySet);
}
}
/**
* Given I have records that require migration
* And I am interested in knowing which records are migrated
* When I run the migration tool
* Then I will be returned a CSV file containing the name and node reference of each migrated record
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@Test
public void processWithCSVFile() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
});
ArgumentCaptor<File> csvFileCaptor = ArgumentCaptor.forClass(File.class);
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"true");
executeWebScript(parameters);
verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
File fileForDownload = csvFileCaptor.getValue();
assertNotNull(fileForDownload);
}
/**
* Given that I have records that require migration
* And I'm not interested in knowing which records were migrated
* When I run the migration tool
* Then I will not be returned a CSV file of details.
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@Test
public void processedWithouthCSVFile() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"false");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
@Test
public void invalidParentNodeRefParameter() throws Exception
{
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef");
executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef is invalid.");
}
catch (WebScriptException e)
{
assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.",
Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus());
}
}
@Test
public void inexistentParentNodeRefParameter() throws Exception
{
try
{
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.exists(parentNodeRef)).thenReturn(false);
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef",
parentNodeRef.toString());
executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef does not exist.");
}
catch (WebScriptException e)
{
assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@SuppressWarnings("unchecked")
@Test
public void processedWithParentNodeRef() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo);
});
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export",
"false", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
@SuppressWarnings("unchecked")
@Test
public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
if (i <= 6l)
{
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false);
}
else
{
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
}
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo);
});
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
}