From b7311e28ac570dd92bde1fc9144310598863f25c Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Wed, 5 Oct 2016 15:20:19 +0100 Subject: [PATCH] Merge branch 'release/V2.4' into merge-2.5/MergeV24ToV25 Conflicts: rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java --- .../rm-action-context.xml | 12 +- .../rm-webscript-context.xml | 2 + .../roles/rm-dynamicauthorities.get.desc.xml | 6 +- .../impl/CopyMoveLinkFileToBaseAction.java | 75 +-- .../action/impl/DestroyAction.java | 6 +- .../scripts/roles/DynamicAuthoritiesGet.java | 481 +++++++++++++++--- .../test/integration/issue/RM3993Test.java | 19 - .../test/integration/issue/RM4101Test.java | 129 +++++ .../record/InplaceRecordPermissionTest.java | 3 +- .../test/util/BaseWebScriptUnitTest.java | 8 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 406 +++++++++++++-- 11 files changed, 945 insertions(+), 202 deletions(-) create mode 100644 rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java diff --git a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml index 04257725c1..0191e398ba 100644 --- a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml +++ b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml @@ -106,11 +106,11 @@ - + - + @@ -260,7 +260,7 @@ depends-on="rmDestroyRecordsScheduledForDestructionCapability"> - + ${rm.ghosting.enabled} @@ -272,7 +272,7 @@ - + @@ -763,7 +763,6 @@ - @@ -792,7 +791,6 @@ - @@ -821,7 +819,6 @@ - @@ -850,7 +847,6 @@ - diff --git a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml index fedd555546..d4f8b73a05 100644 --- a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml +++ b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml @@ -598,6 +598,8 @@ + + diff --git a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 00a2cad9e1..6cf7174dff 100644 --- a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -2,11 +2,13 @@ Removes dynamic authorities - URL parameter batchsize is mandatory, and represents the number of records that are processed in one transaction.
+ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
+ URL parameter export is optional, and if its value is true, the processed records will be exported into a CSV file.
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
]]>
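For illustration, an admin request combining these parameters might look like the following; the parameter values and the node reference are hypothetical placeholders, and the path is relative to the repository's service URL:
/api/rm/rm-dynamicauthorities?batchsize=500&maxProcessedRecords=1000&export=true&parentNodeRef=workspace://SpacesStore/6f1b1c3e-0000-0000-0000-000000000000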
- /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java index 468d0a709f..c2c896afbd 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java @@ -44,12 +44,10 @@ import org.alfresco.service.cmr.action.ParameterDefinition; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileNotFoundException; -import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.dao.ConcurrencyFailureException; import org.springframework.util.StringUtils; /** @@ -62,9 +60,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr { private static Log logger = LogFactory.getLog(CopyMoveLinkFileToBaseAction.class); - /** Retrying transaction helper */ - private RetryingTransactionHelper retryingTransactionHelper; - /** action parameters */ public static final String PARAM_DESTINATION_RECORD_FOLDER = "destinationRecordFolder"; public static final String PARAM_PATH = "path"; @@ -121,14 +116,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr this.filePlanService = filePlanService; } - /** - * @param retryingTransactionHelper retrying transaction helper - */ - public void setRetryingTransactionHelper(RetryingTransactionHelper retryingTransactionHelper) - { - this.retryingTransactionHelper = retryingTransactionHelper; - } - /** * @see org.alfresco.module.org_alfresco_module_rm.action.RMActionExecuterAbstractBase#addParameterDefinitions(java.util.List) */ @@ -165,25 +152,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr NodeRef recordFolder = (NodeRef)action.getParameterValue(PARAM_DESTINATION_RECORD_FOLDER); if (recordFolder == null) { - final boolean finaltargetIsUnfiledRecords = targetIsUnfiledRecords; - recordFolder = retryingTransactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() - { - public NodeRef execute() throws Throwable - { - NodeRef result = null; - try - { - // get the reference to the record folder based on the relative path - result = createOrResolvePath(action, actionedUponNodeRef, finaltargetIsUnfiledRecords); - } - catch (DuplicateChildNodeNameException ex) - { - throw new ConcurrencyFailureException("Cannot create or resolve path.", ex); - } - - return result; - } - }, false, true); + recordFolder = createOrResolvePath(action, actionedUponNodeRef, targetIsUnfiledRecords); } // now we have the reference to the target folder we can do some final checks to see if the action is valid @@ -197,30 +166,26 @@ public abstract class 
CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr { try { - synchronized (this) + if (getMode() == CopyMoveLinkFileToActionMode.MOVE) { - if (getMode() == CopyMoveLinkFileToActionMode.MOVE) - { - fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null); - } - else if (getMode() == CopyMoveLinkFileToActionMode.COPY) - { - fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null); - } - else if (getMode() == CopyMoveLinkFileToActionMode.LINK) - { - getRecordService().link(actionedUponNodeRef, finalRecordFolder); - } + fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null); + } + else if (getMode() == CopyMoveLinkFileToActionMode.COPY) + { + fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null); + } + else if (getMode() == CopyMoveLinkFileToActionMode.LINK) + { + getRecordService().link(actionedUponNodeRef, finalRecordFolder); } } catch (FileNotFoundException fileNotFound) { throw new AlfrescoRuntimeException("Unable to execute file to action, because the " + (mode == CopyMoveLinkFileToActionMode.MOVE ? "move" : "copy") + " operation failed.", fileNotFound); } - + return null; } - }); } } @@ -326,21 +291,11 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr final boolean create = createValue == null ? false : createValue.booleanValue(); // create or resolve the specified path - path = retryingTransactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() + path = getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() { public NodeRef execute() throws Throwable { - NodeRef path = null; - try - { - path = createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, - create, false); - } - catch (DuplicateChildNodeNameException ex) - { - throw new ConcurrencyFailureException("Cannot create or resolve path.", ex); - } - return path; + return createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false); } }, false, true); } @@ -431,7 +386,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr NodeRef child = getChild(parent, childName); if (child == null) { - if(targetisUnfiledRecords) + if (targetisUnfiledRecords) { // create unfiled folder child = fileFolderService.create(parent, childName, RecordsManagementModel.TYPE_UNFILED_RECORD_FOLDER).getNodeRef(); diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java index 3a35dd2713..6917ee1a26 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java @@ -63,7 +63,7 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase /** Recordable version service */ private RecordableVersionService recordableVersionService; - + /** Inplace record service */ private InplaceRecordService inplaceRecordService; @@ -93,7 +93,7 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase { this.recordableVersionService = recordableVersionService; } - + /** * @param inplaceRecordService inplace record 
service */ @@ -183,7 +183,7 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase // Add the ghosted aspect getNodeService().addAspect(record, ASPECT_GHOSTED, null); - + // Hide from inplace users to give the impression of destruction inplaceRecordService.hideRecord(record); diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 5e3020e9aa..09457fc5cb 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -44,12 +44,22 @@ */ package org.alfresco.repo.web.scripts.roles; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import javax.servlet.http.HttpServletResponse; + import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -59,19 +69,26 @@ import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.Pair; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. 
@@ -80,7 +97,7 @@ import org.springframework.extensions.webscripts.WebScriptRequest; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends DeclarativeWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -89,14 +106,16 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String SUCCESS_STATUS = "success"; - private static final String FAILED_STATUS = "failed"; /** * The logger */ private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; + private static final String PARAM_EXPORT = "export"; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; @@ -112,50 +131,60 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; + /** Content Streamer */ + protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; + + /** service setters */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = patchDAO; } - public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - public void setQnameDAO(QNameDAO qnameDAO) { this.qnameDAO = qnameDAO; } - public void setNodeService(NodeService nodeService) { this.nodeService = nodeService; } - public void setPermissionService(PermissionService permissionService) { this.permissionService = permissionService; } - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { this.extendedSecurityService = extendedSecurityService; } - public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; } + public void setNodeDAO(NodeDAO nodeDAO) + { + this.nodeDAO = nodeDAO; + } - @Override - protected Map executeImpl(WebScriptRequest req, Status status, Cache cache) + public void setQnameDAO(QNameDAO qnameDAO) { - Map model = new HashMap(); - String batchSizeStr = req.getParameter(BATCH_SIZE); - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + this.qnameDAO = qnameDAO; + } - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_MANDATORY); - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); 
- return model; - } - try - { - size = Long.parseLong(batchSizeStr); - if(size <= 0) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - return model; - } - } - catch(NumberFormatException ex) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_INVALID); - logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - return model; - } - final Long batchSize = size; + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; + } + + public void setPermissionService(PermissionService permissionService) + { + this.permissionService = permissionService; + } + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + { + this.extendedSecurityService = extendedSecurityService; + } + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException + { + Map model = new HashMap(); + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); @@ -167,6 +196,201 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor return model; } + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try + { + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } + } + finally + { + out.close(); + } + + int processedNodesSize = processedNodes.size(); + + String message = ""; + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + { + message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); + } + if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) + { + message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); + } + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, message); + logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } + return model; + } + + /** + * Get export parameter from the request + * + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = 
false; + String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. + * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + if (mimetype == null) + { + throw new WebScriptException("Web Script format '" + format + "' is not registered"); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) { return; } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); + res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise + * + * @param req + * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); //default total number of records to be processed to batch size value Long totalNumberOfRecordsToProcess = batchSize; if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) @@ -180,7 +404,77 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor //do nothing here, the value will remain 0L in this case } } + return totalNumberOfRecordsToProcess; + } + + /** + * Obtain batchsize parameter from the request. + * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + + /** + * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) + { final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final List processedNodes = new ArrayList(); logger.info(MESSAGE_PROCESSING_BEGIN); @@ -198,7 +492,8 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor public Void execute() throws Throwable { // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + batchSize); + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), 
currentIndex, + currentIndex + batchSize); // process each one for (Long nodeId : nodeIds) @@ -213,38 +508,86 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor processNode(record); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); processedNodes.add(record); - } + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } return null; } - }, - false, // read only + }, false, // read only true); // requires new } logger.info(MESSAGE_PROCESSING_END); - int processedNodesSize = processedNodes.size(); - String message = ""; - if(totalNumberOfRecordsToProcess == 0 || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) - { - message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); - } - if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) - { - message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); - } - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, message); - logger.info(message); - return model; + return processedNodes; } + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; + } + /** * Process each node * * @param nodeRef */ @SuppressWarnings({ "unchecked"}) - private void processNode(NodeRef nodeRef) + protected void processNode(NodeRef nodeRef) { // get the reader/writer data Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); @@ -258,10 +601,20 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + { + Set readersKeySet = null; + if (readers != null) { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } // re-set extended security via API - extendedSecurityService.set(nodeRef, readers.keySet(), writers.keySet()); + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); } } } diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java index 20c6dc3e4d..f8e8633db3 100644 --- a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java @@ -24,25 +24,6 @@ * along with Alfresco. If not, see . * #L% */ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ - package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; import java.io.Serializable; diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java new file mode 100644 index 0000000000..df6a6561f5 --- /dev/null +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java @@ -0,0 +1,129 @@ +/* + * #%L + * Alfresco Records Management Module + * %% + * Copyright (C) 2005 - 2016 Alfresco Software Limited + * %% + * This file is part of the Alfresco software. + * - + * If the software was purchased under a paid Alfresco license, the terms of + * the paid license agreement will prevail. Otherwise, the software is + * provided under the following open source license terms: + * - + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * - + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * - + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ * #L% + */ +package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; + +import java.util.UUID; + +import org.alfresco.module.org_alfresco_module_rm.action.impl.LinkToAction; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.service.cmr.action.Action; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.rule.Rule; +import org.alfresco.service.cmr.rule.RuleService; +import org.alfresco.service.cmr.rule.RuleType; + +/** + * Tests issue #4101: Link to, Copy to and File to rules fail when not run in background + * + * @author Tuna Aksoy + * @since 2.3.0.8 + */ +public class RM4101Test extends BaseRMTestCase +{ + private RuleService ruleService; + + @Override + protected void initServices() + { + super.initServices(); + + ruleService = (RuleService) applicationContext.getBean("RuleService"); + } + + @Override + protected boolean isRecordTest() + { + return true; + } + + public void testRunRuleNotInBackground() throws Exception + { + final String categoryName = "category1" + UUID.randomUUID().toString(); + final NodeRef category1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return filePlanService.createRecordCategory(filePlan, categoryName); + } + }); + + final NodeRef folder1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, "folder1WithRule" + UUID.randomUUID().toString()); + } + }); + + final String folder2Name = "folder2FolderToLinkTo" + UUID.randomUUID().toString(); + final NodeRef folder2 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, folder2Name); + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + Action linkToAction = actionService.createAction(LinkToAction.NAME); + linkToAction.setParameterValue(LinkToAction.PARAM_PATH, "/" + categoryName + "/" + folder2Name); + + Rule rule = new Rule(); + rule.setRuleType(RuleType.INBOUND); + rule.setTitle("LinkTo"); + rule.setAction(linkToAction); + rule.setExecuteAsynchronously(false); + ruleService.saveRule(folder1, rule); + + return null; + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + utils.createRecord(folder1, "record1" + UUID.randomUUID().toString()); + return null; + } + + @Override + public void test(Void result) throws Exception + { + assertEquals(1, nodeService.getChildAssocs(folder2).size()); + } + }); + } +} diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java index b16c1472c3..ef086d2bc8 100644 --- a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java @@ -500,8 +500,7 @@ public class InplaceRecordPermissionTest extends BaseRMTestCase * And it's metadata is maintained * Then the inplace users will no longer see the record */ - // FIXME: See RM-4095 - public void ztestDestroyedRecordInplacePermissions() + public void testDestroyedRecordInplacePermissions() { test() .given() diff --git 
a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 189a6b54d0..3f3105f9b8 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -46,8 +46,8 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; -import org.springframework.extensions.webscripts.DeclarativeWebScript; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; import org.springframework.extensions.webscripts.DescriptionExtension; @@ -83,7 +83,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract DeclarativeWebScript getWebScript(); + protected abstract AbstractWebScript getWebScript(); /** * @return classpath location of webscript template @@ -151,7 +151,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - DeclarativeWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -173,7 +173,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(DeclarativeWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 70304fb172..4f903a2b75 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -49,17 +49,22 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.File; import java.io.Serializable; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -70,6 +75,7 @@ import java.util.stream.Stream; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; @@ -81,6 +87,9 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -90,12 +99,17 @@ import org.alfresco.util.Pair; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * DynamicAuthoritiesGet Unit Test @@ -126,13 +140,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; + @Mock + private ContentStreamer contentStreamer; + @Mock + private FileFolderService mockedFileFolderService; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected DeclarativeWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -154,6 +172,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme webScript.setNodeService(mockedNodeService); webScript.setPermissionService(mockedPermissionService); webScript.setExtendedSecurityService(mockedExtendedSecurityService); + webScript.setFileFolderService(mockedFileFolderService); // setup retrying transaction helper Answer doInTransactionAnswer = new Answer() { @@ -179,7 +198,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + * Given that there are no nodes with the extended security aspect + * When the action is executed Nothing happens + * * @throws Exception */ @SuppressWarnings({ "unchecked" }) @@ -200,7 +221,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = 
"{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -212,8 +232,12 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * Given that there are records with the extended security aspect + * When the action is executed + * Then the aspect is removed + * And the dynamic authorities permissions are cleared + * And extended security is set via the updated API + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -222,8 +246,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -234,7 +257,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -246,7 +268,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -259,8 +280,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared + * Given that there are non-records with the extended security aspect + * When the web script is executed + * Then the aspect is removed And the dynamic authorities permissions are cleared + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -269,8 +292,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -281,7 +303,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -307,39 +328,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - JSONObject json = executeJSONWebScript(emptyMap()); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is mandatory\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { + try + { // Set up parameters. 
Map parameters = ImmutableMap.of("batchsize", "dd"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is invalid.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + try + { // Set up parameters. Map parameters = ImmutableMap.of("batchsize", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize should be a number greater than 0.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -361,8 +393,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -373,7 +404,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -391,8 +421,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -403,7 +432,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -415,4 +443,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + /** + * Given I have records that require migration + * And I am interested in knowning which records are migrated + * When I run the migration tool + * Then I will be returned a CSV file containing the name and node reference of the record migrated + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** + * Given that I have record that require migration + * And I'm not interested in knowing which records were migrated + * When I run the migration tool + * Then I will not be returned a CSV file of details. 
+ *
+ * @throws Exception
+ */
+ @SuppressWarnings("unchecked")
+ @Test
+ public void processedWithoutCSVFile() throws Exception
+ {
+ List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
+ when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
+ .thenReturn(Collections.emptyList());
+
+ ids.stream().forEach((i) -> {
+ NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
+ when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef));
+ when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
+ when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
+ .thenReturn((Serializable) Collections.emptyMap());
+ when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
+ .thenReturn((Serializable) Collections.emptyMap());
+ });
+
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
+ "false");
+ JSONObject json = executeJSONWebScript(parameters);
+ assertNotNull(json);
+ String actualJSONString = json.toString();
+ ObjectMapper mapper = new ObjectMapper();
+ String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
+ assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
+
+ verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
+ any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
+ }
+
+ @Test
+ public void invalidParentNodeRefParameter() throws Exception
+ {
+ try
+ {
+ // Set up parameters.
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef");
+ executeJSONWebScript(parameters);
+ fail("Expected exception as parameter parentNodeRef is invalid.");
+ }
+ catch (WebScriptException e)
+ {
+ assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.",
+ Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus());
+ }
+ }
+
+ @Test
+ public void inexistentParentNodeRefParameter() throws Exception
+ {
+ try
+ {
+ NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
+ when(mockedNodeService.exists(parentNodeRef)).thenReturn(false);
+ // Set up parameters.
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", + parentNodeRef.toString()); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef does not exist."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRef() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", + "false", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), 
eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file
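
The last two tests above pin down the batching semantics the web script is expected to follow: the children of parentNodeRef are walked in pages of batchsize, records that no longer carry the extended security aspect are skipped, and only the remaining records count towards the reported total (hence "Processed 2 records." for 8 children of which the first 6 are already migrated). The following is a minimal, self-contained sketch of that counting rule in plain Java; BatchCountSketch, countStillToMigrate and isAlreadyMigrated are hypothetical names used only for illustration and are not part of DynamicAuthoritiesGet.

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Predicate;

    // Illustrative sketch only -- not the DynamicAuthoritiesGet implementation.
    public final class BatchCountSketch
    {
        /**
         * Walks the candidate records in pages of batchSize and counts how many still
         * need migration, i.e. how many still carry the extended security aspect.
         */
        static <T> int countStillToMigrate(List<T> candidates, int batchSize, Predicate<T> isAlreadyMigrated)
        {
            int processed = 0;
            for (int start = 0; start < candidates.size(); start += batchSize)
            {
                List<T> batch = candidates.subList(start, Math.min(start + batchSize, candidates.size()));
                for (T record : batch)
                {
                    if (!isAlreadyMigrated.test(record))
                    {
                        processed++;
                    }
                }
            }
            return processed;
        }

        public static void main(String[] args)
        {
            // Mirrors processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed:
            // 8 records, the first 6 already migrated, batch size 3 -> "Processed 2 records."
            List<Long> ids = Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L);
            System.out.println("Processed " + countStillToMigrate(ids, 3, id -> id <= 6L) + " records.");
        }
    }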