diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml index fedd555546..d4f8b73a05 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml @@ -598,6 +598,8 @@ + + diff --git a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 00a2cad9e1..6cf7174dff 100644 --- a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -2,11 +2,13 @@ Removes dynamic authorities - URL parameter batchsize is mandatory, and represents the number of records that are processed in one transaction.
+ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
+ URL parameter export is optional, and if its value is true, the processed records will be exported into a CSV file.&lt;br /&gt;
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
]]>
- /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 7dd4c29056..97f22265a3 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -18,12 +18,22 @@ */ package org.alfresco.repo.web.scripts.roles; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import javax.servlet.http.HttpServletResponse; + import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -33,19 +43,26 @@ import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.Pair; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. 
@@ -54,7 +71,7 @@ import org.springframework.extensions.webscripts.WebScriptRequest; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends DeclarativeWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -63,14 +80,16 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String SUCCESS_STATUS = "success"; - private static final String FAILED_STATUS = "failed"; /** * The logger */ private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; + private static final String PARAM_EXPORT = "export"; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; @@ -86,50 +105,60 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; + /** Content Streamer */ + protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; + + /** service setters */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = patchDAO; } - public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - public void setQnameDAO(QNameDAO qnameDAO) { this.qnameDAO = qnameDAO; } - public void setNodeService(NodeService nodeService) { this.nodeService = nodeService; } - public void setPermissionService(PermissionService permissionService) { this.permissionService = permissionService; } - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { this.extendedSecurityService = extendedSecurityService; } - public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; } + public void setNodeDAO(NodeDAO nodeDAO) + { + this.nodeDAO = nodeDAO; + } - @Override - protected Map executeImpl(WebScriptRequest req, Status status, Cache cache) + public void setQnameDAO(QNameDAO qnameDAO) { - Map model = new HashMap(); - String batchSizeStr = req.getParameter(BATCH_SIZE); - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + this.qnameDAO = qnameDAO; + } - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_MANDATORY); - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); - 
return model; - } - try - { - size = Long.parseLong(batchSizeStr); - if(size <= 0) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - return model; - } - } - catch(NumberFormatException ex) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_INVALID); - logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - return model; - } - final Long batchSize = size; + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; + } + + public void setPermissionService(PermissionService permissionService) + { + this.permissionService = permissionService; + } + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + { + this.extendedSecurityService = extendedSecurityService; + } + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException + { + Map model = new HashMap(); + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); @@ -141,6 +170,201 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor return model; } + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try + { + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } + } + finally + { + out.close(); + } + + int processedNodesSize = processedNodes.size(); + + String message = ""; + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + { + message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); + } + if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) + { + message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); + } + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, message); + logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } + return model; + } + + /** + * Get export parameter from the request + * + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = false; 
+ String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. + * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + if (mimetype == null) + { + throw new WebScriptException("Web Script format '" + format + "' is not registered"); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) { return; } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); + res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain the maximum number of records to be processed from the request if it is specified, or the batchsize value otherwise + * + * @param req + * @return the maximum number of records to be processed from the request if it is specified, or the batchsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); //default total number of records to be processed to batch size value Long totalNumberOfRecordsToProcess = batchSize; if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) @@ -154,7 +378,77 @@ //do nothing here, the value will remain 0L in this case } } + return totalNumberOfRecordsToProcess; + } + + /** + * Obtain batchsize parameter from the request. + * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return the parentNodeRef parameter from the request, or null if it is not specified + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + + /** + * Process all nodes, or the maximum number of nodes specified by the batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) + { final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final List processedNodes = new ArrayList(); logger.info(MESSAGE_PROCESSING_BEGIN); @@ -172,7 +466,8 @@ public Void execute() throws Throwable { // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + batchSize); + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), 
currentIndex, + currentIndex + batchSize); // process each one for (Long nodeId : nodeIds) @@ -187,38 +482,86 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor processNode(record); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); processedNodes.add(record); - } + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } return null; } - }, - false, // read only + }, false, // read only true); // requires new } logger.info(MESSAGE_PROCESSING_END); - int processedNodesSize = processedNodes.size(); - String message = ""; - if(totalNumberOfRecordsToProcess == 0 || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) - { - message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); - } - if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) - { - message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); - } - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, message); - logger.info(message); - return model; + return processedNodes; } + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; + } + /** * Process each node * * @param nodeRef */ @SuppressWarnings({ "unchecked"}) - private void processNode(NodeRef nodeRef) + protected void processNode(NodeRef nodeRef) { // get the reader/writer data Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); @@ -232,10 +575,20 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + { + Set readersKeySet = null; + if (readers != null) { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } // re-set extended security via API - extendedSecurityService.set(nodeRef, readers.keySet(), writers.keySet()); + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); } } } diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 807610699c..646fcc2572 100644 --- a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -31,8 +31,8 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; -import org.springframework.extensions.webscripts.DeclarativeWebScript; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; import org.springframework.extensions.webscripts.DescriptionExtension; @@ -68,7 +68,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract DeclarativeWebScript getWebScript(); + protected abstract AbstractWebScript getWebScript(); /** * @return classpath location of webscript template @@ -136,7 +136,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - DeclarativeWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -158,7 +158,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(DeclarativeWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index d8a7818ae4..74baf0a4e1 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -23,17 +23,22 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; import static 
org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.File; import java.io.Serializable; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -44,6 +49,7 @@ import java.util.stream.Stream; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; @@ -55,6 +61,9 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -64,12 +73,17 @@ import org.alfresco.util.Pair; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * DynamicAuthoritiesGet Unit Test @@ -100,13 +114,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; + @Mock + private ContentStreamer contentStreamer; + @Mock + private FileFolderService mockedFileFolderService; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected DeclarativeWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -128,6 +146,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme webScript.setNodeService(mockedNodeService); webScript.setPermissionService(mockedPermissionService); webScript.setExtendedSecurityService(mockedExtendedSecurityService); + webScript.setFileFolderService(mockedFileFolderService); // setup retrying transaction helper Answer doInTransactionAnswer = new Answer() { @@ -153,7 +172,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are no nodes with the extended security aspect When the action is 
executed Nothing happens + * Given that there are no nodes with the extended security aspect + * When the action is executed Nothing happens + * * @throws Exception */ @SuppressWarnings({ "unchecked" }) @@ -174,7 +195,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -186,8 +206,12 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * Given that there are records with the extended security aspect + * When the action is executed + * Then the aspect is removed + * And the dynamic authorities permissions are cleared + * And extended security is set via the updated API + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -196,8 +220,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -208,7 +231,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -220,7 +242,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -233,8 +254,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared + * Given that there are non-records with the extended security aspect + * When the web script is executed + * Then the aspect is removed And the dynamic authorities permissions are cleared + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -243,8 +266,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -255,7 +277,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -281,39 +302,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - JSONObject json = executeJSONWebScript(emptyMap()); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is mandatory\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { + try + { // Set up parameters. 
Map parameters = ImmutableMap.of("batchsize", "dd"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is invalid.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + try + { // Set up parameters. Map parameters = ImmutableMap.of("batchsize", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize should be a number greater than 0.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -335,8 +367,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -347,7 +378,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -365,8 +395,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -377,7 +406,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -389,4 +417,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + /** + * Given I have records that require migration + * And I am interested in knowing which records are migrated + * When I run the migration tool + * Then I will be returned a CSV file containing the name and node reference of the records migrated + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** + * Given that I have records that require migration + * And I'm not interested in knowing which records were migrated + * When I run the migration tool + * Then I will not be returned a CSV file of details. 
+ * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processedWithouthCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "false"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @Test + public void invalidParentNodeRefParameter() throws Exception + { + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", + Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); + } + } + + @Test + public void inexistentParentNodeRefParameter() throws Exception + { + try + { + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", + parentNodeRef.toString()); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef does not exist."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef does not exist then 'Bad Request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRef() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", + "false", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), 
eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file
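Example usage of the extended web script (a minimal sketch, assuming the default Alfresco web script servlet mapping under /alfresco/service as run by an admin user; the host, port and parentNodeRef value are placeholders, not values taken from this change):

GET http://localhost:8080/alfresco/service/api/rm/rm-dynamicauthorities?batchsize=100&maxProcessedRecords=1000&export=true
GET http://localhost:8080/alfresco/service/api/rm/rm-dynamicauthorities?batchsize=100&parentNodeRef=workspace://SpacesStore/some-folder-uuid

With export=true the processed records are streamed back as a CSV attachment, one line per record holding the record name and its node reference, matching the out.write calls in processNodes and processChildrenNodes. With parentNodeRef set, processing is restricted to the children of the given folder instead of walking all nodes that carry the extended security aspect.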