From 577173c2b69ed8426a90c10b5744743eea501fc5 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Mon, 26 Sep 2016 17:32:53 +0300 Subject: [PATCH 01/28] RM-4097: added fix for both issues --- .../rm-webscript-context.xml | 1 + .../roles/rm-dynamicauthorities.get.desc.xml | 3 +- .../scripts/roles/DynamicAuthoritiesGet.java | 450 ++++++++++++++---- .../test/util/BaseWebScriptUnitTest.java | 10 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 109 ++++- 5 files changed, 439 insertions(+), 134 deletions(-) diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml index da41dd3d0e..bf87988d70 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml @@ -596,6 +596,7 @@ + diff --git a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index bd4aee74a0..49b7ca51a0 100644 --- a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -4,9 +4,10 @@ Removes dynamic authorities from in place records created in previous verssions.
URL parameter batchsize is mandatory, and represents the number of records that are processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
+ URL parameter export is optional; if its value is true, the processed records will be exported into a CSV file.<br/>
]]> - /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?} argument admin required diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index b023dcf45e..c7da809e5c 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -16,14 +16,24 @@ * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see . */ + package org.alfresco.repo.web.scripts.roles; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.servlet.http.HttpServletResponse; + import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -33,19 +43,24 @@ import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; 
import org.alfresco.util.Pair; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. @@ -54,7 +69,7 @@ import org.springframework.extensions.webscripts.WebScriptRequest; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends DeclarativeWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -64,20 +79,19 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; private static final String SUCCESS_STATUS = "success"; - private static final String FAILED_STATUS = "failed"; /** * The logger */ private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; + private static final 
String PARAM_EXPORT = "export"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; private static final String MESSAGE_PARTIAL_TEMPLATE = "Processed first {0} records."; private static final String MESSAGE_NO_RECORDS_TO_PROCESS = "There where no records to be processed."; - /** services */ private PatchDAO patchDAO; private NodeDAO nodeDAO; @@ -86,54 +100,57 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; - + /** Content Streamer */ + protected ContentStreamer contentStreamer; /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = patchDAO; } - public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - public void setQnameDAO(QNameDAO qnameDAO) { this.qnameDAO = qnameDAO; } - public void setNodeService(NodeService nodeService) { this.nodeService = nodeService; } - public void setPermissionService(PermissionService permissionService) { this.permissionService = permissionService; } - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { this.extendedSecurityService = extendedSecurityService; } - public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; } + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - @Override - protected Map executeImpl(WebScriptRequest req, Status status, Cache cache) + public void setNodeDAO(NodeDAO nodeDAO) + { + this.nodeDAO = nodeDAO; + } + + public void setQnameDAO(QNameDAO qnameDAO) + { + this.qnameDAO = qnameDAO; + } + + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; + } + + public void 
setPermissionService(PermissionService permissionService) + { + this.permissionService = permissionService; + } + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + { + this.extendedSecurityService = extendedSecurityService; + } + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException { Map model = new HashMap(); - String batchSizeStr = req.getParameter(BATCH_SIZE); - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); - - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_MANDATORY); - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); - return model; - } - try - { - size = Long.parseLong(batchSizeStr); - if(size <= 0) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - return model; - } - } - catch(NumberFormatException ex) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_INVALID); - logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - return model; - } - final Long batchSize = size; + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); - if(recordAspectPair == null) + if (recordAspectPair == null) { model.put(MODEL_STATUS, SUCCESS_STATUS); model.put(MODEL_MESSAGE, MESSAGE_NO_RECORDS_TO_PROCESS); @@ -141,64 +158,29 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript 
implements Recor return model; } - //default total number of records to be processed to batch size value - Long totalNumberOfRecordsToProcess = batchSize; - if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try { - try - { - totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); - } - catch(NumberFormatException ex) - { - //do nothing here, the value will remain 0L in this case - } + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, out, + attach); + } + finally + { + out.close(); } - final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; - final List processedNodes = new ArrayList(); - logger.info(MESSAGE_PROCESSING_BEGIN); - // by batch size - for (Long i = 0L; i < maxNodeId; i+=batchSize) - { - if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - final Long currentIndex = i; - - transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() - { - public Void execute() throws Throwable - { - // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + batchSize); - - // process each one - for (Long nodeId : nodeIds) - { - if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); - String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); - 
processNode(record); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); - processedNodes.add(record); - } - - return null; - } - }, - false, // read only - true); // requires new - } - logger.info(MESSAGE_PROCESSING_END); int processedNodesSize = processedNodes.size(); + String message = ""; - if(totalNumberOfRecordsToProcess == 0 || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) { message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); } @@ -209,20 +191,284 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor model.put(MODEL_STATUS, SUCCESS_STATUS); model.put(MODEL_MESSAGE, message); logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } return model; } + /** + * Get export parameter from the request + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = false; + String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. 
+ * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + if (mimetype == null) + { + throw new WebScriptException("Web Script format '" + format + "' is not registered"); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) + { + return; + } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + 
";charset=UTF-8"); + res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise + * + * @param req + * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + // default total number of records to be processed to batch size value + Long totalNumberOfRecordsToProcess = batchSize; + if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) + { + try + { + totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); + } + catch (NumberFormatException ex) + { + // do nothing here, the value will remain 0L in this case + } + } + return totalNumberOfRecordsToProcess; + } + + /** + * Obtain batchsize parameter from the request. 
+ * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + + /** + * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) + { + final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; + final List processedNodes = new ArrayList(); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (Long i = 0L; i < maxNodeId; i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final Long currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + // get the nodes with the extended security aspect applied + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, + 
currentIndex + batchSize); + + // process each one + for (Long nodeId : nodeIds) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; + } + /** * Process each node * * @param nodeRef */ - @SuppressWarnings({ "unchecked"}) + @SuppressWarnings({ "unchecked" }) private void processNode(NodeRef nodeRef) { // get the reader/writer data - Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); - Map writers = (Map)nodeService.getProperty(nodeRef, PROP_WRITERS); + Map readers = (Map) nodeService.getProperty(nodeRef, PROP_READERS); + Map writers = (Map) nodeService.getProperty(nodeRef, PROP_WRITERS); // remove extended security aspect nodeService.removeAspect(nodeRef, ASPECT_EXTENDED_SECURITY); @@ -232,7 +478,7 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD) && readers != null && writers != null) { // re-set extended security via API extendedSecurityService.set(nodeRef, readers.keySet(), writers.keySet()); diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 2a0f18a927..79f9843585 100644 --- a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -31,8 +31,8 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; -import org.springframework.extensions.webscripts.DeclarativeWebScript; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; import org.springframework.extensions.webscripts.DescriptionExtension; @@ -68,8 +68,8 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract DeclarativeWebScript getWebScript(); - + protected abstract AbstractWebScript getWebScript(); + /** * @return classpath location of webscript template */ @@ -136,7 +136,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - DeclarativeWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -158,7 +158,7 @@ public 
abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(DeclarativeWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index c18ed9c9cf..80e65f5984 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -23,6 +23,7 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; @@ -55,6 +56,7 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -69,7 +71,9 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import 
org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; /** * DynamicAuthoritiesGet Unit Test @@ -100,13 +104,15 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; + @Mock + private ContentStreamer contentStreamer; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected DeclarativeWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -255,7 +261,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -281,39 +286,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - JSONObject json = executeJSONWebScript(emptyMap()); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is mandatory\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "dd"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is invalid.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "dd"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize should be a number greater than 0.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "0"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -389,4 +405,45 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } + + @SuppressWarnings("unchecked") + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } } \ No newline at end of file From 8d05147c6e511fac0d9cc73c111e2474035f2d06 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Tue, 27 Sep 2016 12:17:04 +0300 Subject: [PATCH 02/28] RM-4907: added unit tests for csv acceptance criteria and made processNode method protected --- .../scripts/roles/DynamicAuthoritiesGet.java | 2 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 887 ++++++++++-------- 2 files changed, 482 insertions(+), 407 deletions(-) diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 82ec4dc211..d957ccdd1b 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -464,7 +464,7 @@ public class 
DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM * @param nodeRef */ @SuppressWarnings({ "unchecked"}) - private void processNode(NodeRef nodeRef) + protected void processNode(NodeRef nodeRef) { // get the reader/writer data Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index c81df1251c..9ee07f681b 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -1,420 +1,214 @@ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ - -package org.alfresco.repo.web.scripts.roles; - -import static java.util.Collections.emptyMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. 
+ * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ + +package org.alfresco.repo.web.scripts.roles; + +import static java.util.Collections.emptyMap; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyLong; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.Serializable; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; -import 
org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; -import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; -import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.domain.patch.PatchDAO; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.transaction.RetryingTransactionHelper; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.Serializable; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; +import org.alfresco.repo.domain.node.NodeDAO; +import org.alfresco.repo.domain.patch.PatchDAO; +import org.alfresco.repo.domain.qname.QNameDAO; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import 
org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; -import org.json.JSONObject; -import org.junit.Before; -import org.junit.Test; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException; - -/** - * DynamicAuthoritiesGet Unit Test - * - * @author Silviu Dinuta - */ -@SuppressWarnings("deprecation") -public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel -{ - /** test data */ - private static final Long ASPECT_ID = 123l; - private static final QName ASPECT = AlfMock.generateQName(); - - /** mocks */ - @Mock - private PatchDAO mockedPatchDAO; - @Mock - private NodeDAO mockedNodeDAO; - @Mock - private 
QNameDAO mockedQnameDAO; - @Mock - private NodeService mockedNodeService; - @Mock - private PermissionService mockedPermissionService; - @Mock - private ExtendedSecurityService mockedExtendedSecurityService; - @Mock - private TransactionService mockedTransactionService; - @Mock - private RetryingTransactionHelper mockedRetryingTransactionHelper; +import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; + +/** + * DynamicAuthoritiesGet Unit Test + * + * @author Silviu Dinuta + */ +@SuppressWarnings("deprecation") +public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel +{ + /** test data */ + private static final Long ASPECT_ID = 123l; + private static final QName ASPECT = AlfMock.generateQName(); + + /** mocks */ + @Mock + private PatchDAO mockedPatchDAO; + @Mock + private NodeDAO mockedNodeDAO; + @Mock + private QNameDAO mockedQnameDAO; + @Mock + private NodeService mockedNodeService; + @Mock + private PermissionService mockedPermissionService; + @Mock + private ExtendedSecurityService mockedExtendedSecurityService; + @Mock + private TransactionService mockedTransactionService; + @Mock + private RetryingTransactionHelper mockedRetryingTransactionHelper; @Mock private ContentStreamer contentStreamer; - - /** test component */ - @InjectMocks - private DynamicAuthoritiesGet webScript; - - @Override - protected AbstractWebScript getWebScript() - { - return webScript; - } - - @Override - protected String getWebScriptTemplate() - { - return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; - } - - /** - * Before test - */ - @SuppressWarnings("unchecked") - @Before - public void before() - { - MockitoAnnotations.initMocks(this); - webScript.setNodeService(mockedNodeService); - webScript.setPermissionService(mockedPermissionService); - 
webScript.setExtendedSecurityService(mockedExtendedSecurityService); - // setup retrying transaction helper - Answer doInTransactionAnswer = new Answer() - { - @SuppressWarnings("rawtypes") - @Override - public Object answer(InvocationOnMock invocation) throws Throwable - { - RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; - return callback.execute(); - } - }; - - doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) - . doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); - - when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); - - // max node id - when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); - - // aspect - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); - } - - /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens - * @throws Exception - */ - @SuppressWarnings({ "unchecked" }) - @Test - public void noNodesWithExtendedSecurity() throws Exception - { - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(Collections.emptyList()); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - - // Check the JSON result using Jackson to allow easy equality testing. 
- ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void recordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); - - } - - /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void nonRecordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) 
Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - @Test - public void missingBatchSizeParameter() throws Exception - { - try - { - executeJSONWebScript(emptyMap()); - fail("Expected exception as parameter batchsize is mandatory."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void invalidBatchSizeParameter() throws Exception - { - try - { - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "dd"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void batchSizeShouldBeGraterThanZero() throws Exception - { - try - { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "0"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is not a number greater than 0."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void extendedSecurityAspectNotCreated() throws Exception - { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception - { - List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } + /** test component */ + @InjectMocks + private DynamicAuthoritiesGet webScript; + + @Override + protected AbstractWebScript getWebScript() + { + return webScript; + } + + @Override + protected String getWebScriptTemplate() + { + return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; + } + + /** + * Before test + */ + @SuppressWarnings("unchecked") + @Before + public void before() + { + MockitoAnnotations.initMocks(this); + webScript.setNodeService(mockedNodeService); + webScript.setPermissionService(mockedPermissionService); + webScript.setExtendedSecurityService(mockedExtendedSecurityService); + // setup retrying transaction helper + Answer doInTransactionAnswer = new Answer() + { + @SuppressWarnings("rawtypes") + @Override + public Object answer(InvocationOnMock invocation) throws Throwable + { + RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; + return callback.execute(); + } + }; + + doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) + . 
doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); + + when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); + + // max node id + when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); + + // aspect + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); + } + + /** + * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + * + * @throws Exception + */ + @SuppressWarnings({ "unchecked" }) + @Test + public void noNodesWithExtendedSecurity() throws Exception + { + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(Collections.emptyList()); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + + // Check the JSON result using Jackson to allow easy equality testing. 
+ ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + /** + * Given that there are records with the extended security aspect When the action is executed Then the aspect is + * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * + * @throws Exception + */ @SuppressWarnings("unchecked") @Test - public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + public void recordsWithExtendedSecurityAspect() throws Exception { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -422,9 +216,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - 
.thenReturn(null); + .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn(null); + .thenReturn((Serializable) Collections.emptyMap()); }); @@ -437,6 +231,51 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); + + } + + /** + * Given that there are non-records with the extended security aspect When the web script is executed Then the + * aspect is removed And the dynamic authorities permissions are cleared + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void nonRecordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + 
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); @@ -447,4 +286,240 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); } + + @Test + public void missingBatchSizeParameter() throws Exception + { + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void invalidBatchSizeParameter() throws Exception + { + try + { + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "dd"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void batchSizeShouldBeGraterThanZero() throws Exception + { + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "0"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void extendedSecurityAspectNotCreated() throws Exception + { + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @SuppressWarnings("unchecked") + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + /** + * Given I have records that require migration + * And I am interested in knowning which records are migrated + * When I run the migration tool + * Then I will be returned a CSV file containing the name and node reference of the record migrated + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + 
.thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** + * Given that I have record that require migration + * And I'm not interested in knowing which records were migrated + * When I run the migration tool + * And I will not be returned a CSV file of details. 
+ * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processedWithouthCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "false"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file From b9bb54579e050f3294da7af251e044dfee17fe7c Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Tue, 27 Sep 2016 12:30:43 +0300 Subject: [PATCH 03/28] minor formating issue --- .../roles/DynamicAuthoritiesGetUnitTest.java | 587 +++++++++--------- 1 file changed, 296 insertions(+), 291 deletions(-) diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java 
b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 9ee07f681b..d3a3280cf0 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -1,28 +1,28 @@ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ - -package org.alfresco.repo.web.scripts.roles; - -import static java.util.Collections.emptyMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ + +package org.alfresco.repo.web.scripts.roles; + +import static java.util.Collections.emptyMap; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; @@ -111,185 +111,187 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private RetryingTransactionHelper mockedRetryingTransactionHelper; @Mock private ContentStreamer contentStreamer; - - /** test component */ - @InjectMocks - private DynamicAuthoritiesGet webScript; - - @Override + + /** test component */ + @InjectMocks + private DynamicAuthoritiesGet webScript; + + @Override protected AbstractWebScript getWebScript() - { - return webScript; - } - - @Override - protected String getWebScriptTemplate() - { - return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; - } - - /** - * Before test - */ - @SuppressWarnings("unchecked") - @Before - public void before() - { - MockitoAnnotations.initMocks(this); - webScript.setNodeService(mockedNodeService); - webScript.setPermissionService(mockedPermissionService); - webScript.setExtendedSecurityService(mockedExtendedSecurityService); - // setup retrying transaction helper - Answer doInTransactionAnswer = new Answer() - { - @SuppressWarnings("rawtypes") - @Override - public Object answer(InvocationOnMock invocation) throws Throwable - { - RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; - return callback.execute(); - } - }; - - doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) - . 
doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); - - when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); - - // max node id - when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); - - // aspect - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); - } - - /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens - * - * @throws Exception - */ - @SuppressWarnings({ "unchecked" }) - @Test - public void noNodesWithExtendedSecurity() throws Exception - { - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(Collections.emptyList()); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - - // Check the JSON result using Jackson to allow easy equality testing. 
- ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API - * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void recordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); - - } - - /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the - * aspect is removed And the dynamic authorities permissions are cleared - * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void nonRecordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) 
Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - @Test - public void missingBatchSizeParameter() throws Exception - { + { + return webScript; + } + + @Override + protected String getWebScriptTemplate() + { + return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; + } + + /** + * Before test + */ + @SuppressWarnings("unchecked") + @Before + public void before() + { + MockitoAnnotations.initMocks(this); + webScript.setNodeService(mockedNodeService); + webScript.setPermissionService(mockedPermissionService); + webScript.setExtendedSecurityService(mockedExtendedSecurityService); + // setup retrying transaction helper + Answer doInTransactionAnswer = new Answer() + { + @SuppressWarnings("rawtypes") + @Override + 
public Object answer(InvocationOnMock invocation) throws Throwable + { + RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; + return callback.execute(); + } + }; + + doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) + . doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); + + when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); + + // max node id + when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); + + // aspect + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); + } + + /** + * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + * @throws Exception + */ + @SuppressWarnings({ "unchecked" }) + @Test + public void noNodesWithExtendedSecurity() throws Exception + { + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(Collections.emptyList()); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + + // Check the JSON result using Jackson to allow easy equality testing. 
+ ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + /** + * Given that there are records with the extended security aspect When the action is executed Then the aspect is + * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void recordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); + + } + + /** + * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is + * removed And the dynamic authorities permissions are cleared + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void nonRecordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) 
Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + @Test + public void missingBatchSizeParameter() throws Exception + { try { executeJSONWebScript(emptyMap()); @@ -300,15 +302,15 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void invalidBatchSizeParameter() throws Exception - { + } + + @Test + public void invalidBatchSizeParameter() throws Exception + { try { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "dd"); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "dd"); executeJSONWebScript(parameters); fail("Expected exception as parameter batchsize is invalid."); } @@ -317,15 +319,15 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void batchSizeShouldBeGraterThanZero() throws Exception - { + } + + @Test + public void batchSizeShouldBeGraterThanZero() throws Exception + { try { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "0"); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "0"); executeJSONWebScript(parameters); fail("Expected exception as parameter batchsize is not a number greater than 0."); } @@ -334,79 +336,81 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void extendedSecurityAspectNotCreated() throws Exception - { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } + } + + @Test + public void extendedSecurityAspectNotCreated() throws Exception + { + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception + { + List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } @SuppressWarnings("unchecked") @Test @@ -414,7 +418,8 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { From 48e6cc47de682e37cb130a84f42b20a9c7660034 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Tue, 27 Sep 2016 12:48:31 +0300 Subject: [PATCH 04/28] formatting fix --- .../roles/DynamicAuthoritiesGetUnitTest.java | 162 +++++++++--------- 1 file changed, 81 insertions(+), 81 deletions(-) diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index d3a3280cf0..421286bc1a 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -24,91 +24,91 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyLong; -import static 
org.mockito.Matchers.eq; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.Serializable; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import org.alfresco.model.ContentModel; -import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; -import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; -import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.domain.patch.PatchDAO; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.transaction.RetryingTransactionHelper; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.Serializable; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; 
+import java.util.stream.Stream; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; +import org.alfresco.repo.domain.node.NodeDAO; +import org.alfresco.repo.domain.patch.PatchDAO; +import org.alfresco.repo.domain.qname.QNameDAO; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; -import org.json.JSONObject; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; +import org.json.JSONObject; 
+import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Status; -import org.springframework.extensions.webscripts.WebScriptException; -import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptResponse; - -/** - * DynamicAuthoritiesGet Unit Test - * - * @author Silviu Dinuta - */ -@SuppressWarnings("deprecation") -public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel -{ - /** test data */ - private static final Long ASPECT_ID = 123l; - private static final QName ASPECT = AlfMock.generateQName(); - - /** mocks */ - @Mock - private PatchDAO mockedPatchDAO; - @Mock - private NodeDAO mockedNodeDAO; - @Mock - private QNameDAO mockedQnameDAO; - @Mock - private NodeService mockedNodeService; - @Mock - private PermissionService mockedPermissionService; - @Mock - private ExtendedSecurityService mockedExtendedSecurityService; - @Mock - private TransactionService mockedTransactionService; - @Mock - private RetryingTransactionHelper mockedRetryingTransactionHelper; + +/** + * DynamicAuthoritiesGet Unit Test + * + * @author Silviu Dinuta + */ +@SuppressWarnings("deprecation") +public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel +{ + /** test data */ + private static final Long ASPECT_ID = 123l; + private static final QName ASPECT = AlfMock.generateQName(); + + /** mocks */ + @Mock + private PatchDAO mockedPatchDAO; + @Mock + private NodeDAO 
mockedNodeDAO; + @Mock + private QNameDAO mockedQnameDAO; + @Mock + private NodeService mockedNodeService; + @Mock + private PermissionService mockedPermissionService; + @Mock + private ExtendedSecurityService mockedExtendedSecurityService; + @Mock + private TransactionService mockedTransactionService; + @Mock + private RetryingTransactionHelper mockedRetryingTransactionHelper; @Mock private ContentStreamer contentStreamer; From 31c6b1e228fba50ed3828bb124791fd95e42885a Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Thu, 29 Sep 2016 18:44:06 +0300 Subject: [PATCH 05/28] RM-4162: added extra parentNodeRef parameter, checked writers and readers individually for null and added unit tests for the changes --- .../rm-webscript-context.xml | 1 + .../roles/rm-dynamicauthorities.get.desc.xml | 7 +- .../scripts/roles/DynamicAuthoritiesGet.java | 444 ++++--- .../roles/DynamicAuthoritiesGetUnitTest.java | 1021 ++++++++++------- 4 files changed, 881 insertions(+), 592 deletions(-) diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml index 4657373e41..e1deb70752 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml @@ -597,6 +597,7 @@ + diff --git a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 7f77ed80ab..6cf7174dff 100644 --- a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -2,12 +2,13 @@ Removes dynamic authorities - URL parameter batchsize is mandatory, and 
represents the number of records that are processed in one transaction.
+ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
- URL parameter export is optional, and if the it's value is true, will export the processed records into a csv file.
+ URL parameter export is optional, and if its value is true, the processed records will be exported into a CSV file.<br/>
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
]]>
- /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index d957ccdd1b..78014bbdb3 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -25,12 +25,13 @@ import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + import javax.servlet.http.HttpServletResponse; import org.alfresco.model.ContentModel; @@ -43,121 +44,131 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; 
+import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; import org.alfresco.util.TempFileProvider; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.extensions.webscripts.AbstractWebScript; -import org.springframework.extensions.webscripts.Cache; +import org.springframework.extensions.webscripts.Cache; import org.springframework.extensions.webscripts.Format; -import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException; -import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptResponse; - -/** - * Webscript used for removing dynamic authorities from the records. - * - * @author Silviu Dinuta - * @since 2.3.0.7 - */ -@SuppressWarnings("deprecation") + +/** + * Webscript used for removing dynamic authorities from the records. 
+ * + * @author Silviu Dinuta + * @since 2.3.0.7 + */ +@SuppressWarnings("deprecation") public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel -{ - private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; - private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; - private static final String MESSAGE_PROCESSING_END = "Processing - END"; - private static final String MESSAGE_PROCESSING_RECORD_END_TEMPLATE = "Processing record {0} - END"; - private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; - private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; - private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; - private static final String SUCCESS_STATUS = "success"; - /** - * The logger - */ - private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); - private static final String BATCH_SIZE = "batchsize"; - private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; +{ + private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; + private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; + private static final String MESSAGE_PROCESSING_END = "Processing - END"; + private static final String MESSAGE_PROCESSING_RECORD_END_TEMPLATE = "Processing record {0} - END"; + private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; + private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; + private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; + 
private static final String SUCCESS_STATUS = "success"; + /** + * The logger + */ + private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); + private static final String BATCH_SIZE = "batchsize"; + private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; private static final String PARAM_EXPORT = "export"; - private static final String MODEL_STATUS = "responsestatus"; - private static final String MODEL_MESSAGE = "message"; - private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; - private static final String MESSAGE_PARTIAL_TEMPLATE = "Processed first {0} records."; - private static final String MESSAGE_NO_RECORDS_TO_PROCESS = "There where no records to be processed."; - - - /** services */ - private PatchDAO patchDAO; - private NodeDAO nodeDAO; - private QNameDAO qnameDAO; - private NodeService nodeService; - private PermissionService permissionService; - private ExtendedSecurityService extendedSecurityService; - private TransactionService transactionService; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; + private static final String MODEL_STATUS = "responsestatus"; + private static final String MODEL_MESSAGE = "message"; + private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; + private static final String MESSAGE_PARTIAL_TEMPLATE = "Processed first {0} records."; + private static final String MESSAGE_NO_RECORDS_TO_PROCESS = "There where no records to be processed."; + + /** services */ + private PatchDAO patchDAO; + private NodeDAO nodeDAO; + private QNameDAO qnameDAO; + private NodeService nodeService; + private PermissionService permissionService; + private ExtendedSecurityService extendedSecurityService; + private TransactionService transactionService; /** Content Streamer */ protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; + /** service setters */ public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = 
patchDAO; } - + public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - + public void setQnameDAO(QNameDAO qnameDAO) - { + { this.qnameDAO = qnameDAO; } - + public void setNodeService(NodeService nodeService) - { + { this.nodeService = nodeService; - } + } public void setPermissionService(PermissionService permissionService) - { + { this.permissionService = permissionService; } public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) - { + { this.extendedSecurityService = extendedSecurityService; - } + } public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; - } + } public void setContentStreamer(ContentStreamer contentStreamer) - { + { this.contentStreamer = contentStreamer; - } + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException { Map model = new HashMap(); final Long batchSize = getBatchSizeParameter(req); - // get the max node id and the extended security aspect - Long maxNodeId = patchDAO.getMaxAdmNodeID(); - final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); - if(recordAspectPair == null) - { - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_NO_RECORDS_TO_PROCESS); - logger.info(MESSAGE_NO_RECORDS_TO_PROCESS); - return model; - } - + // get the max node id and the extended security aspect + Long maxNodeId = patchDAO.getMaxAdmNodeID(); + final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); + if (recordAspectPair == null) + { + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, MESSAGE_NO_RECORDS_TO_PROCESS); + logger.info(MESSAGE_NO_RECORDS_TO_PROCESS); + return model; + } + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); boolean attach = getExportParameter(req); 
@@ -168,8 +179,17 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM List processedNodes = new ArrayList(); try { - processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, out, - attach); + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } } finally { @@ -213,6 +233,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM /** * Get export parameter from the request + * * @param req * @return */ @@ -241,19 +262,14 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM try { String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); - if (mimetype == null) - { - throw new WebScriptException("Web Script format '" + format + "' is not registered"); - } + if (mimetype == null) { throw new WebScriptException( + "Web Script format '" + format + "' is not registered"); } // construct model for script / template Status status = new Status(); Cache cache = new Cache(getDescription().getRequiredCache()); Map model = buildModel(req, res); - if (model == null) - { - return; - } + if (model == null) { return; } model.put("status", status); model.put("cache", cache); @@ -346,19 +362,19 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) { String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); - //default total number of records to be processed to batch size value - Long totalNumberOfRecordsToProcess = batchSize; - if 
(StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) - { - try - { - totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); - } - catch(NumberFormatException ex) - { - //do nothing here, the value will remain 0L in this case - } - } + // default total number of records to be processed to batch size value + Long totalNumberOfRecordsToProcess = batchSize; + if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) + { + try + { + totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); + } + catch (NumberFormatException ex) + { + // do nothing here, the value will remain 0L in this case + } + } return totalNumberOfRecordsToProcess; } @@ -393,7 +409,30 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM } return size; } - + + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + /** * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess * parameters @@ -407,81 +446,146 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) { - final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; - final List processedNodes = new ArrayList(); - 
logger.info(MESSAGE_PROCESSING_BEGIN); - // by batch size - for (Long i = 0L; i < maxNodeId; i+=batchSize) - { - if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - final Long currentIndex = i; - - transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() - { - public Void execute() throws Throwable - { - // get the nodes with the extended security aspect applied + final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; + final List processedNodes = new ArrayList(); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (Long i = 0L; i < maxNodeId; i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final Long currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + // get the nodes with the extended security aspect applied List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + batchSize); - - // process each one - for (Long nodeId : nodeIds) - { - if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); - String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); - processNode(record); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); - processedNodes.add(record); + + // process each one + for (Long nodeId : nodeIds) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + 
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); if (attach) { out.write(recordName); out.write(","); out.write(record.toString()); out.write("\n"); - } + } } - - return null; - } + + return null; + } }, false, // read only - true); // requires new - } - logger.info(MESSAGE_PROCESSING_END); + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); return processedNodes; - } - - /** - * Process each node - * - * @param nodeRef - */ - @SuppressWarnings({ "unchecked"}) - protected void processNode(NodeRef nodeRef) - { - // get the reader/writer data - Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); - Map writers = (Map)nodeService.getProperty(nodeRef, PROP_WRITERS); - - // remove extended security aspect - nodeService.removeAspect(nodeRef, ASPECT_EXTENDED_SECURITY); - - // remove dynamic authority permissions - permissionService.clearPermission(nodeRef, ExtendedReaderDynamicAuthority.EXTENDED_READER); - permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); - - // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD) && readers != null && writers != null) - { - // re-set extended security via API - extendedSecurityService.set(nodeRef, readers.keySet(), writers.keySet()); - } - } -} + } + + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; + } + + /** + * Process each node + * + * 
@param nodeRef + */ + @SuppressWarnings({ "unchecked" }) + protected void processNode(NodeRef nodeRef) + { + // get the reader/writer data + Map readers = (Map) nodeService.getProperty(nodeRef, PROP_READERS); + Map writers = (Map) nodeService.getProperty(nodeRef, PROP_WRITERS); + + // remove extended security aspect + nodeService.removeAspect(nodeRef, ASPECT_EXTENDED_SECURITY); + + // remove dynamic authority permissions + permissionService.clearPermission(nodeRef, ExtendedReaderDynamicAuthority.EXTENDED_READER); + permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); + + // if record then ... + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + { + Set readersKeySet = null; + if (readers != null) + { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } + // re-set extended security via API + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); + } + } +} diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 421286bc1a..46d38000a0 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -1,433 +1,278 @@ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ - -package org.alfresco.repo.web.scripts.roles; - -import static java.util.Collections.emptyMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ */ + +package org.alfresco.repo.web.scripts.roles; + +import static java.util.Collections.emptyMap; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyLong; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.Serializable; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import org.alfresco.model.ContentModel; -import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; -import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; -import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.domain.patch.PatchDAO; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.transaction.RetryingTransactionHelper; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; +import static 
org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; +import org.alfresco.repo.domain.node.NodeDAO; +import org.alfresco.repo.domain.patch.PatchDAO; +import org.alfresco.repo.domain.qname.QNameDAO; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; -import org.json.JSONObject; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import 
org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Status; -import org.springframework.extensions.webscripts.WebScriptException; -import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptResponse; - -/** - * DynamicAuthoritiesGet Unit Test - * - * @author Silviu Dinuta - */ -@SuppressWarnings("deprecation") -public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel -{ - /** test data */ - private static final Long ASPECT_ID = 123l; - private static final QName ASPECT = AlfMock.generateQName(); - - /** mocks */ - @Mock - private PatchDAO mockedPatchDAO; - @Mock - private NodeDAO mockedNodeDAO; - @Mock - private QNameDAO mockedQnameDAO; - @Mock - private NodeService mockedNodeService; - @Mock - private PermissionService mockedPermissionService; - @Mock - private 
ExtendedSecurityService mockedExtendedSecurityService; - @Mock - private TransactionService mockedTransactionService; - @Mock - private RetryingTransactionHelper mockedRetryingTransactionHelper; + +/** + * DynamicAuthoritiesGet Unit Test + * + * @author Silviu Dinuta + */ +@SuppressWarnings("deprecation") +public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel +{ + /** test data */ + private static final Long ASPECT_ID = 123l; + private static final QName ASPECT = AlfMock.generateQName(); + + /** mocks */ + @Mock + private PatchDAO mockedPatchDAO; + @Mock + private NodeDAO mockedNodeDAO; + @Mock + private QNameDAO mockedQnameDAO; + @Mock + private NodeService mockedNodeService; + @Mock + private PermissionService mockedPermissionService; + @Mock + private ExtendedSecurityService mockedExtendedSecurityService; + @Mock + private TransactionService mockedTransactionService; + @Mock + private RetryingTransactionHelper mockedRetryingTransactionHelper; @Mock private ContentStreamer contentStreamer; - - /** test component */ - @InjectMocks - private DynamicAuthoritiesGet webScript; - - @Override - protected AbstractWebScript getWebScript() - { - return webScript; - } - - @Override - protected String getWebScriptTemplate() - { - return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; - } - - /** - * Before test - */ - @SuppressWarnings("unchecked") - @Before - public void before() - { - MockitoAnnotations.initMocks(this); - webScript.setNodeService(mockedNodeService); - webScript.setPermissionService(mockedPermissionService); - webScript.setExtendedSecurityService(mockedExtendedSecurityService); - // setup retrying transaction helper - Answer doInTransactionAnswer = new Answer() - { - @SuppressWarnings("rawtypes") - @Override - public Object answer(InvocationOnMock invocation) throws Throwable - { - RetryingTransactionCallback callback = (RetryingTransactionCallback) 
invocation.getArguments()[0]; - return callback.execute(); - } - }; - - doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) - . doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); - - when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); - - // max node id - when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); - - // aspect - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); - } - - /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens - * @throws Exception - */ - @SuppressWarnings({ "unchecked" }) - @Test - public void noNodesWithExtendedSecurity() throws Exception - { - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(Collections.emptyList()); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - - // Check the JSON result using Jackson to allow easy equality testing. 
- ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void recordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); - - } - - /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void nonRecordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) 
Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - @Test - public void missingBatchSizeParameter() throws Exception - { - try - { - executeJSONWebScript(emptyMap()); - fail("Expected exception as parameter batchsize is mandatory."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void invalidBatchSizeParameter() throws Exception - { - try - { - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "dd"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void batchSizeShouldBeGraterThanZero() throws Exception - { - try - { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "0"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is not a number greater than 0."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @Test - public void extendedSecurityAspectNotCreated() throws Exception - { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception - { - List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } + @Mock + private FileFolderService mockedFileFolderService; + /** test component */ + @InjectMocks + private DynamicAuthoritiesGet webScript; + + @Override + protected AbstractWebScript getWebScript() + { + return webScript; + } + + @Override + protected String getWebScriptTemplate() + { + return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; + } + + /** + * Before test + */ + @SuppressWarnings("unchecked") + @Before + public void before() + { + MockitoAnnotations.initMocks(this); + webScript.setNodeService(mockedNodeService); + webScript.setPermissionService(mockedPermissionService); + webScript.setExtendedSecurityService(mockedExtendedSecurityService); + webScript.setFileFolderService(mockedFileFolderService); + // setup retrying transaction helper + Answer doInTransactionAnswer = new Answer() + { + @SuppressWarnings("rawtypes") + @Override + public Object answer(InvocationOnMock invocation) throws Throwable + { + RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; + return callback.execute(); + } + }; + + doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) + . 
doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); + + when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); + + // max node id + when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); + + // aspect + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); + } + + /** + * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + * + * @throws Exception + */ + @SuppressWarnings({ "unchecked" }) + @Test + public void noNodesWithExtendedSecurity() throws Exception + { + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(Collections.emptyList()); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + + // Check the JSON result using Jackson to allow easy equality testing. 
+ ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + /** + * Given that there are records with the extended security aspect When the action is executed Then the aspect is + * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * + * @throws Exception + */ @SuppressWarnings("unchecked") @Test - public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + public void recordsWithExtendedSecurityAspect() throws Exception { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); - 
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); + + } + + /** + * Given that there are non-records with the extended security aspect When the web script is executed Then the + * aspect is removed And the dynamic authorities permissions are cleared + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void nonRecordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + 
ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); }); @@ -450,11 +295,233 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); } + @Test + public void missingBatchSizeParameter() throws Exception + { + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void invalidBatchSizeParameter() throws Exception + { + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "dd"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void batchSizeShouldBeGraterThanZero() throws Exception + { + try + { + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "0"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @Test + public void extendedSecurityAspectNotCreated() throws Exception + { + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + 
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + /** - * Given I have records that require migration - * And I am interested in knowning 
which records are migrated
-     * When I run the migration tool
-     * Then I will be returned a CSV file containing the name and node reference of the record migrated
+     * Given I have records that require migration And I am interested in knowing which records are migrated When I run
+     * the migration tool Then I will be returned a CSV file containing the name and node reference of the record
+     * migrated
+     *
      * @throws Exception
      */
     @SuppressWarnings("unchecked")
@@ -491,10 +558,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
     }
 
     /**
-     * Given that I have record that require migration
-     * And I'm not interested in knowing which records were migrated
-     * When I run the migration tool
-     * And I will not be returned a CSV file of details.
+     * Given that I have records that require migration And I'm not interested in knowing which records were migrated
+     * When I run the migration tool And I will not be returned a CSV file of details.
+     *
      * @throws Exception
      */
     @SuppressWarnings("unchecked")
@@ -527,4 +593,121 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
         verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
                 any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
     }
+
+    @Test
+    public void invalidParentNodeRefParameter() throws Exception
+    {
+        try
+        {
+            // Set up parameters.
+            Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef");
+            executeJSONWebScript(parameters);
+            fail("Expected exception as parameter parentNodeRef is invalid.");
+        }
+        catch (WebScriptException e)
+        {
+            assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.",
+                    Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus());
+        }
+    }
+
+    @Test
+    public void inexistentParentNodeRefParameter() throws Exception
+    {
+        try
+        {
+            NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
+            when(mockedNodeService.exists(parentNodeRef)).thenReturn(false);
+            // Set up parameters.
+            Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef",
+                    parentNodeRef.toString());
+            executeJSONWebScript(parameters);
+            fail("Expected exception as parameter parentNodeRef does not exist.");
+        }
+        catch (WebScriptException e)
+        {
+            assertEquals("If parameter parentNodeRef does not exist then 'Bad Request' should be returned.",
+                    Status.STATUS_BAD_REQUEST, e.getStatus());
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    @Test
+    public void processedWithParentNodeRef() throws Exception
+    {
+        List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
+        NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
+        List children = new ArrayList();
+        ids.stream().forEach((i) -> {
+            NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
+            when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
+            when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
+            when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
+                    .thenReturn((Serializable) Collections.emptyMap());
+            when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
+                    .thenReturn((Serializable) Collections.emptyMap());
+            String name = "name" + i;
+            when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
+            FileInfo mockedFileInfo = mock(FileInfo.class);
+            
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", + "false", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo 
mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file From 39a8273063667f60da17798e4e89f95ed620bcc6 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Thu, 29 Sep 2016 22:44:37 +0300 Subject: [PATCH 06/28] RM-4162: fixed formatting --- .../scripts/roles/DynamicAuthoritiesGet.java | 323 ++++---- .../roles/DynamicAuthoritiesGetUnitTest.java | 743 +++++++++--------- 2 files changed, 537 insertions(+), 529 deletions(-) diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 78014bbdb3..7782a14534 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -25,13 +25,13 @@ import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import 
java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.Set; - + import javax.servlet.http.HttpServletResponse; import org.alfresco.model.ContentModel; @@ -46,64 +46,65 @@ import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransacti import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileInfo; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; import org.alfresco.util.TempFileProvider; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.extensions.webscripts.AbstractWebScript; -import org.springframework.extensions.webscripts.Cache; +import org.springframework.extensions.webscripts.Cache; import org.springframework.extensions.webscripts.Format; -import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException; -import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptRequest; 
import org.springframework.extensions.webscripts.WebScriptResponse; - -/** - * Webscript used for removing dynamic authorities from the records. - * - * @author Silviu Dinuta - * @since 2.3.0.7 - */ -@SuppressWarnings("deprecation") + +/** + * Webscript used for removing dynamic authorities from the records. + * + * @author Silviu Dinuta + * @since 2.3.0.7 + */ +@SuppressWarnings("deprecation") public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel -{ - private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; - private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; - private static final String MESSAGE_PROCESSING_END = "Processing - END"; - private static final String MESSAGE_PROCESSING_RECORD_END_TEMPLATE = "Processing record {0} - END"; - private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; - private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; - private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; +{ + private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; + private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; + private static final String MESSAGE_PROCESSING_END = "Processing - END"; + private static final String MESSAGE_PROCESSING_RECORD_END_TEMPLATE = "Processing record {0} - END"; + private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; + private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; + private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not 
exist."; - private static final String SUCCESS_STATUS = "success"; - /** - * The logger - */ - private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); - private static final String BATCH_SIZE = "batchsize"; - private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; + private static final String SUCCESS_STATUS = "success"; + /** + * The logger + */ + private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); + private static final String BATCH_SIZE = "batchsize"; + private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; private static final String PARAM_EXPORT = "export"; private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; - private static final String MODEL_STATUS = "responsestatus"; - private static final String MODEL_MESSAGE = "message"; - private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; - private static final String MESSAGE_PARTIAL_TEMPLATE = "Processed first {0} records."; - private static final String MESSAGE_NO_RECORDS_TO_PROCESS = "There where no records to be processed."; - - /** services */ - private PatchDAO patchDAO; - private NodeDAO nodeDAO; - private QNameDAO qnameDAO; - private NodeService nodeService; - private PermissionService permissionService; - private ExtendedSecurityService extendedSecurityService; - private TransactionService transactionService; + private static final String MODEL_STATUS = "responsestatus"; + private static final String MODEL_MESSAGE = "message"; + private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; + private static final String MESSAGE_PARTIAL_TEMPLATE = "Processed first {0} records."; + private static final String MESSAGE_NO_RECORDS_TO_PROCESS = "There where no records to be processed."; + + + /** services */ + private PatchDAO patchDAO; + private NodeDAO nodeDAO; + private QNameDAO qnameDAO; + private NodeService nodeService; + private PermissionService permissionService; + 
private ExtendedSecurityService extendedSecurityService; + private TransactionService transactionService; /** Content Streamer */ protected ContentStreamer contentStreamer; private FileFolderService fileFolderService; @@ -113,41 +114,41 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM { this.patchDAO = patchDAO; } - + public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - + public void setQnameDAO(QNameDAO qnameDAO) - { + { this.qnameDAO = qnameDAO; } - + public void setNodeService(NodeService nodeService) - { + { this.nodeService = nodeService; - } + } public void setPermissionService(PermissionService permissionService) - { + { this.permissionService = permissionService; } public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) - { + { this.extendedSecurityService = extendedSecurityService; - } + } public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; - } + } public void setContentStreamer(ContentStreamer contentStreamer) - { + { this.contentStreamer = contentStreamer; - } + } public void setFileFolderService(FileFolderService fileFolderService) { @@ -158,17 +159,17 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM { Map model = new HashMap(); final Long batchSize = getBatchSizeParameter(req); - // get the max node id and the extended security aspect - Long maxNodeId = patchDAO.getMaxAdmNodeID(); - final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); - if (recordAspectPair == null) - { - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_NO_RECORDS_TO_PROCESS); - logger.info(MESSAGE_NO_RECORDS_TO_PROCESS); - return model; - } - + // get the max node id and the extended security aspect + Long maxNodeId = patchDAO.getMaxAdmNodeID(); + final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); + if(recordAspectPair == null) 
+ { + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, MESSAGE_NO_RECORDS_TO_PROCESS); + logger.info(MESSAGE_NO_RECORDS_TO_PROCESS); + return model; + } + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); boolean attach = getExportParameter(req); @@ -184,7 +185,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM { processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, totalNumberOfRecordsToProcess.intValue(), out, attach); - } + } else { processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, @@ -262,8 +263,10 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM try { String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); - if (mimetype == null) { throw new WebScriptException( - "Web Script format '" + format + "' is not registered"); } + if (mimetype == null) + { + throw new WebScriptException("Web Script format '" + format + "' is not registered"); + } // construct model for script / template Status status = new Status(); @@ -362,19 +365,19 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) { String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); - // default total number of records to be processed to batch size value - Long totalNumberOfRecordsToProcess = batchSize; - if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) - { - try - { - totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); - } - catch (NumberFormatException ex) - { - // do nothing here, the value will remain 0L in this case - } - } + //default total number of records to be processed to batch size value + Long totalNumberOfRecordsToProcess = batchSize; + if 
(StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) + { + try + { + totalNumberOfRecordsToProcess = Long.parseLong(totalToBeProcessedRecordsStr); + } + catch(NumberFormatException ex) + { + //do nothing here, the value will remain 0L in this case + } + } return totalNumberOfRecordsToProcess; } @@ -409,7 +412,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM } return size; } - + /** * Get parentNodeRef parameter from the request * @@ -446,57 +449,57 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) { - final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; - final List processedNodes = new ArrayList(); - logger.info(MESSAGE_PROCESSING_BEGIN); - // by batch size - for (Long i = 0L; i < maxNodeId; i += batchSize) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - final Long currentIndex = i; - - transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() - { - public Void execute() throws Throwable - { - // get the nodes with the extended security aspect applied + final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; + final List processedNodes = new ArrayList(); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (Long i = 0L; i < maxNodeId; i+=batchSize) + { + if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final Long currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + // get the nodes with the extended security aspect applied List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + 
batchSize); - - // process each one - for (Long nodeId : nodeIds) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); - String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); - processNode(record); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); - processedNodes.add(record); + + // process each one + for (Long nodeId : nodeIds) + { + if(maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = nodeDAO.getNodePair(nodeId).getSecond(); + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); if (attach) { out.write(recordName); out.write(","); out.write(record.toString()); out.write("\n"); - } + } } - - return null; - } + + return null; + } }, false, // read only - true); // requires new - } - logger.info(MESSAGE_PROCESSING_END); + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); return processedNodes; - } - + } + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, final boolean attach) @@ -552,31 +555,31 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM return processedNodes; } - /** - * Process each node - * - * @param nodeRef - */ - @SuppressWarnings({ "unchecked" }) - protected void processNode(NodeRef nodeRef) - { - // get the reader/writer data - Map readers = (Map) nodeService.getProperty(nodeRef, PROP_READERS); - Map 
writers = (Map) nodeService.getProperty(nodeRef, PROP_WRITERS); - - // remove extended security aspect - nodeService.removeAspect(nodeRef, ASPECT_EXTENDED_SECURITY); - - // remove dynamic authority permissions - permissionService.clearPermission(nodeRef, ExtendedReaderDynamicAuthority.EXTENDED_READER); - permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); - - // if record then ... + /** + * Process each node + * + * @param nodeRef + */ + @SuppressWarnings({ "unchecked"}) + protected void processNode(NodeRef nodeRef) + { + // get the reader/writer data + Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); + Map writers = (Map)nodeService.getProperty(nodeRef, PROP_WRITERS); + + // remove extended security aspect + nodeService.removeAspect(nodeRef, ASPECT_EXTENDED_SECURITY); + + // remove dynamic authority permissions + permissionService.clearPermission(nodeRef, ExtendedReaderDynamicAuthority.EXTENDED_READER); + permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); + + // if record then ... 
if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) { Set readersKeySet = null; if (readers != null) - { + { readersKeySet = readers.keySet(); } Set writersKeySet = null; @@ -584,8 +587,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM { writersKeySet = writers.keySet(); } - // re-set extended security via API + // re-set extended security via API extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); - } - } -} + } + } +} diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 46d38000a0..74baf0a4e1 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -1,303 +1,307 @@ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ - -package org.alfresco.repo.web.scripts.roles; - -import static java.util.Collections.emptyMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. 
+ * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ + +package org.alfresco.repo.web.scripts.roles; + +import static java.util.Collections.emptyMap; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyLong; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doAnswer; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.Serializable; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.Serializable; import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import 
java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import org.alfresco.model.ContentModel; -import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; -import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; -import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; -import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; -import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.domain.patch.PatchDAO; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.transaction.RetryingTransactionHelper; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; +import org.alfresco.module.org_alfresco_module_rm.security.ExtendedWriterDynamicAuthority; +import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseWebScriptUnitTest; +import org.alfresco.repo.domain.node.NodeDAO; +import org.alfresco.repo.domain.patch.PatchDAO; +import 
org.alfresco.repo.domain.qname.QNameDAO; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileInfo; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.security.PermissionService; -import org.alfresco.service.namespace.QName; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.Pair; -import org.json.JSONObject; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.security.PermissionService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.Pair; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Status; -import org.springframework.extensions.webscripts.WebScriptException; -import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; import 
org.springframework.extensions.webscripts.WebScriptResponse; - -/** - * DynamicAuthoritiesGet Unit Test - * - * @author Silviu Dinuta - */ -@SuppressWarnings("deprecation") -public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel -{ - /** test data */ - private static final Long ASPECT_ID = 123l; - private static final QName ASPECT = AlfMock.generateQName(); - - /** mocks */ - @Mock - private PatchDAO mockedPatchDAO; - @Mock - private NodeDAO mockedNodeDAO; - @Mock - private QNameDAO mockedQnameDAO; - @Mock - private NodeService mockedNodeService; - @Mock - private PermissionService mockedPermissionService; - @Mock - private ExtendedSecurityService mockedExtendedSecurityService; - @Mock - private TransactionService mockedTransactionService; - @Mock - private RetryingTransactionHelper mockedRetryingTransactionHelper; + +/** + * DynamicAuthoritiesGet Unit Test + * + * @author Silviu Dinuta + */ +@SuppressWarnings("deprecation") +public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest implements RecordsManagementModel +{ + /** test data */ + private static final Long ASPECT_ID = 123l; + private static final QName ASPECT = AlfMock.generateQName(); + + /** mocks */ + @Mock + private PatchDAO mockedPatchDAO; + @Mock + private NodeDAO mockedNodeDAO; + @Mock + private QNameDAO mockedQnameDAO; + @Mock + private NodeService mockedNodeService; + @Mock + private PermissionService mockedPermissionService; + @Mock + private ExtendedSecurityService mockedExtendedSecurityService; + @Mock + private TransactionService mockedTransactionService; + @Mock + private RetryingTransactionHelper mockedRetryingTransactionHelper; @Mock private ContentStreamer contentStreamer; @Mock private FileFolderService mockedFileFolderService; - - /** test component */ - @InjectMocks - private DynamicAuthoritiesGet webScript; - - @Override + + /** test component */ + @InjectMocks + private DynamicAuthoritiesGet webScript; + + @Override 
protected AbstractWebScript getWebScript() - { - return webScript; - } - - @Override - protected String getWebScriptTemplate() - { - return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; - } - - /** - * Before test - */ - @SuppressWarnings("unchecked") - @Before - public void before() - { - MockitoAnnotations.initMocks(this); - webScript.setNodeService(mockedNodeService); - webScript.setPermissionService(mockedPermissionService); - webScript.setExtendedSecurityService(mockedExtendedSecurityService); + { + return webScript; + } + + @Override + protected String getWebScriptTemplate() + { + return "alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.json.ftl"; + } + + /** + * Before test + */ + @SuppressWarnings("unchecked") + @Before + public void before() + { + MockitoAnnotations.initMocks(this); + webScript.setNodeService(mockedNodeService); + webScript.setPermissionService(mockedPermissionService); + webScript.setExtendedSecurityService(mockedExtendedSecurityService); webScript.setFileFolderService(mockedFileFolderService); - // setup retrying transaction helper - Answer doInTransactionAnswer = new Answer() - { - @SuppressWarnings("rawtypes") - @Override - public Object answer(InvocationOnMock invocation) throws Throwable - { - RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; - return callback.execute(); - } - }; - - doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) - . 
doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); - - when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); - - // max node id - when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); - - // aspect - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); - } - - /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + // setup retrying transaction helper + Answer doInTransactionAnswer = new Answer() + { + @SuppressWarnings("rawtypes") + @Override + public Object answer(InvocationOnMock invocation) throws Throwable + { + RetryingTransactionCallback callback = (RetryingTransactionCallback) invocation.getArguments()[0]; + return callback.execute(); + } + }; + + doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper) + . doInTransaction(any(RetryingTransactionCallback.class), anyBoolean(), anyBoolean()); + + when(mockedTransactionService.getRetryingTransactionHelper()).thenReturn(mockedRetryingTransactionHelper); + + // max node id + when(mockedPatchDAO.getMaxAdmNodeID()).thenReturn(500000L); + + // aspect + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(new Pair(ASPECT_ID, ASPECT)); + } + + /** + * Given that there are no nodes with the extended security aspect + * When the action is executed Nothing happens * - * @throws Exception - */ - @SuppressWarnings({ "unchecked" }) - @Test - public void noNodesWithExtendedSecurity() throws Exception - { - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(Collections.emptyList()); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - - // Check the JSON result using Jackson to allow easy equality testing. - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * @throws Exception + */ + @SuppressWarnings({ "unchecked" }) + @Test + public void noNodesWithExtendedSecurity() throws Exception + { + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) + .thenReturn(Collections.emptyList()); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + + // Check the JSON result using Jackson to allow easy equality testing. 
+ ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, never()).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + /** + * Given that there are records with the extended security aspect + * When the action is executed + * Then the aspect is removed + * And the dynamic authorities permissions are cleared + * And extended security is set via the updated API * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void recordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void recordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) 
- .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); - - } - - /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the - * aspect is removed And the dynamic authorities permissions are cleared + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) 
Collections.emptyMap()); + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), any(Set.class), any(Set.class)); + + } + + /** + * Given that there are non-records with the extended security aspect + * When the web script is executed + * Then the aspect is removed And the dynamic authorities permissions are cleared * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void nonRecordsWithExtendedSecurityAspect() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void nonRecordsWithExtendedSecurityAspect() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = 
AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); - } - - @Test - public void missingBatchSizeParameter() throws Exception - { + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + 
.thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, never()).set(any(NodeRef.class), any(Set.class), any(Set.class)); + } + + @Test + public void missingBatchSizeParameter() throws Exception + { try { executeJSONWebScript(emptyMap()); @@ -308,15 +312,15 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void invalidBatchSizeParameter() throws Exception - { + } + + @Test + public void invalidBatchSizeParameter() throws Exception + { try { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "dd"); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "dd"); executeJSONWebScript(parameters); fail("Expected exception as parameter batchsize is invalid."); } @@ -325,15 +329,15 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void batchSizeShouldBeGraterThanZero() throws Exception - { + } + + @Test + public void batchSizeShouldBeGraterThanZero() throws Exception + { try { - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "0"); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "0"); executeJSONWebScript(parameters); fail("Expected exception as parameter batchsize is not a number greater than 0."); } @@ -342,79 +346,77 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", Status.STATUS_BAD_REQUEST, e.getStatus()); } - } - - @Test - public void extendedSecurityAspectNotCreated() throws Exception - { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "3"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l).collect(Collectors.toList()); - + } + + @Test + public void extendedSecurityAspectNotCreated() throws Exception + { + when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "3"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"There where no records to be processed.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void processAllRecordsWhenMaxProcessedRecordsIsZero() throws Exception + { + List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - 
- }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } - - @Test - public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "0"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } + + @Test + public void whenMaxProcessedRecordsIsMissingItDefaultsToBatchSize() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - } + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(false); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + } @SuppressWarnings({ "unchecked", "rawtypes" }) @Test @@ -518,9 +520,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given I have records that require migration And I am interested in knowning which records are migrated When I run - * the migration tool Then I will be returned a CSV file containing the name and node reference of the record - * migrated + * Given I have records that require migration + * And I am interested in knowning which records are migrated + * When I run the migration tool + * Then I will be returned a CSV file containing the name 
and node reference of the record migrated * * @throws Exception */ @@ -558,8 +561,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that I have record that require migration And I'm not interested in knowing which records were migrated - * When I run the migration tool And I will not be returned a CSV file of details. + * Given that I have record that require migration + * And I'm not interested in knowing which records were migrated + * When I run the migration tool + * Then I will not be returned a CSV file of details. * * @throws Exception */ From c336a70846826b4bd56c5f8ab19adfada89afd96 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Thu, 29 Sep 2016 23:02:13 +0300 Subject: [PATCH 07/28] RM-4162: formatting fix --- .../alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 7782a14534..97f22265a3 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -185,7 +185,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM { processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, totalNumberOfRecordsToProcess.intValue(), out, attach); - } + } else { processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, From 228bcbd96088c2616ab15e958bd7f7a78ccefef3 Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Sat, 1 Oct 2016 17:21:57 +0100 Subject: [PATCH 08/28] RM-4101 (Link to, Copy to and File to rules fail when not run in background) --- .../impl/CopyMoveLinkFileToBaseAction.java | 26 ++-- .../integration/issue/IssueTestSuite.java | 3 
+- .../test/integration/issue/RM4101Test.java | 121 ++++++++++++++++++ 3 files changed, 135 insertions(+), 15 deletions(-) create mode 100644 rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java index 8017009e82..17eda31bb9 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java @@ -125,15 +125,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr NodeRef recordFolder = (NodeRef)action.getParameterValue(PARAM_DESTINATION_RECORD_FOLDER); if (recordFolder == null) { - final boolean finaltargetIsUnfiledRecords = targetIsUnfiledRecords; - recordFolder = getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() - { - public NodeRef execute() throws Throwable - { - // get the reference to the record folder based on the relative path - return createOrResolvePath(action, actionedUponNodeRef, finaltargetIsUnfiledRecords); - } - }, false, true); + recordFolder = createOrResolvePath(action, actionedUponNodeRef, targetIsUnfiledRecords); } // now we have the reference to the target folder we can do some final checks to see if the action is valid @@ -259,23 +251,29 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr * @param targetisUnfiledRecords true is the target is in unfiled records * @return */ - private NodeRef createOrResolvePath(Action action, NodeRef actionedUponNodeRef, boolean targetisUnfiledRecords) + private NodeRef createOrResolvePath(final Action action, final 
NodeRef actionedUponNodeRef, final boolean targetisUnfiledRecords) { // get the starting context - NodeRef context = getContext(action, actionedUponNodeRef, targetisUnfiledRecords); + final NodeRef context = getContext(action, actionedUponNodeRef, targetisUnfiledRecords); NodeRef path = context; // get the path we wish to resolve String pathParameter = (String)action.getParameterValue(PARAM_PATH); - String[] pathElementsArray = StringUtils.tokenizeToStringArray(pathParameter, "/", false, true); + final String[] pathElementsArray = StringUtils.tokenizeToStringArray(pathParameter, "/", false, true); if((pathElementsArray != null) && (pathElementsArray.length > 0)) { // get the create parameter Boolean createValue = (Boolean)action.getParameterValue(PARAM_CREATE_RECORD_PATH); - boolean create = createValue == null ? false : createValue.booleanValue(); + final boolean create = createValue == null ? false : createValue.booleanValue(); // create or resolve the specified path - path = createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false); + path = getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() + { + public NodeRef execute() throws Throwable + { + return createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false); + } + }, false, true); } return path; } diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java index 61b99cc2b4..56de827b02 100755 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java @@ -46,7 +46,8 
@@ import org.junit.runners.Suite.SuiteClasses; RM1814Test.class, RM978Test.class, RM1887Test.class, - RM1914Test.class + RM1914Test.class, + RM4101Test.class }) public class IssueTestSuite { diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java new file mode 100644 index 0000000000..3c302264a3 --- /dev/null +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java @@ -0,0 +1,121 @@ +/* + * Copyright (C) 2005-2016 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ */ +package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; + +import java.util.UUID; + +import org.alfresco.module.org_alfresco_module_rm.action.impl.LinkToAction; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.service.cmr.action.Action; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.rule.Rule; +import org.alfresco.service.cmr.rule.RuleService; +import org.alfresco.service.cmr.rule.RuleType; + +/** + * Tests issue #4101: Link to, Copy to and File to rules fail when not run in background + * + * @author Tuna Aksoy + * @since 2.3.0.8 + */ +public class RM4101Test extends BaseRMTestCase +{ + private RuleService ruleService; + + @Override + protected void initServices() + { + super.initServices(); + + ruleService = (RuleService) applicationContext.getBean("RuleService"); + } + + @Override + protected boolean isRecordTest() + { + return true; + } + + public void testRunRuleNotInBackground() throws Exception + { + final String categoryName = "category1" + UUID.randomUUID().toString(); + final NodeRef category1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return filePlanService.createRecordCategory(filePlan, categoryName); + } + }); + + final NodeRef folder1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, "folder1WithRule" + UUID.randomUUID().toString()); + } + }); + + final String folder2Name = "folder2FolderToLinkTo" + UUID.randomUUID().toString(); + final NodeRef folder2 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, folder2Name); + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + Action linkToAction = actionService.createAction(LinkToAction.NAME); + linkToAction.setParameterValue(LinkToAction.PARAM_PATH, "/" + 
categoryName + "/" + folder2Name); + + Rule rule = new Rule(); + rule.setRuleType(RuleType.INBOUND); + rule.setTitle("LinkTo"); + rule.setAction(linkToAction); + rule.setExecuteAsynchronously(false); + ruleService.saveRule(folder1, rule); + + return null; + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + utils.createRecord(folder1, "record1" + UUID.randomUUID().toString()); + return null; + } + + @Override + public void test(Void result) throws Exception + { + assertEquals(1, nodeService.getChildAssocs(folder2).size()); + } + }); + } +} From 4cb13c00521dddbc861de9fdaaa4ca376dd3e947 Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Sat, 1 Oct 2016 21:56:18 +0100 Subject: [PATCH 09/28] RM-4095 (Fix failing tests after merging 2.3.0.x up to master) --- .../org_alfresco_module_rm/rm-action-context.xml | 1 + .../action/impl/DestroyAction.java | 15 +++++++++++++++ .../record/InplaceRecordPermissionTest.java | 3 +-- .../test/integration/record/RecordTestSuite.java | 4 +++- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml index 7c53ee1dea..1845cdf360 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml @@ -259,6 +259,7 @@ depends-on="rmDestroyRecordsScheduledForDestructionCapability"> + ${rm.ghosting.enabled} diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java index b4b01a87e2..07a3bb8cb8 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java @@ 
-29,6 +29,7 @@ import org.alfresco.module.org_alfresco_module_rm.action.RMDispositionActionExec import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.record.InplaceRecordService; import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner; import org.alfresco.service.cmr.action.Action; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; @@ -55,6 +56,9 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase /** Capability service */ private CapabilityService capabilityService; + /** Inplace record service */ + private InplaceRecordService inplaceRecordService; + /** Indicates if ghosting is enabled or not */ private boolean ghostingEnabled = true; @@ -74,6 +78,14 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase this.capabilityService = capabilityService; } + /** + * @param inplaceRecordService inplace record service + */ + public void setInplaceRecordService(InplaceRecordService inplaceRecordService) + { + this.inplaceRecordService = inplaceRecordService; + } + /** * @param ghostingEnabled true if ghosting is enabled, false otherwise */ @@ -152,6 +164,9 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase { // Add the ghosted aspect getNodeService().addAspect(record, ASPECT_GHOSTED, null); + + // Hide from inplace users to give the impression of destruction + inplaceRecordService.hideRecord(record); } else { diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java index b16c1472c3..ef086d2bc8 100644 --- 
a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java @@ -500,8 +500,7 @@ public class InplaceRecordPermissionTest extends BaseRMTestCase * And it's metadata is maintained * Then the inplace users will no longer see the record */ - // FIXME: See RM-4095 - public void ztestDestroyedRecordInplacePermissions() + public void testDestroyedRecordInplacePermissions() { test() .given() diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java index a374616e98..ad664420cf 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java @@ -37,7 +37,9 @@ import org.junit.runners.Suite.SuiteClasses; HideInplaceRecordTest.class, MoveInplaceRecordTest.class, ViewRecordTest.class, - LinkRecordTest.class + LinkRecordTest.class, + CreateInplaceRecordTest.class, + InplaceRecordPermissionTest.class }) public class RecordTestSuite { From 283259478881f269eebf652a927da5f66f1917bb Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Sat, 1 Oct 2016 22:50:50 +0100 Subject: [PATCH 10/28] RM-4095 (Fix failing tests after merging 2.3.0.x up to master) --- .../test/integration/issue/RM1727Test.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1727Test.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1727Test.java index 89f9aea966..d74556cc22 100644 --- 
a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1727Test.java +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1727Test.java @@ -80,8 +80,17 @@ public class RM1727Test extends BaseRMTestCase public void testRM1727() { - //set read and file permissions for folder - filePlanPermissionService.setPermission(folder, myUser, RMPermissionModel.FILING); + doTestInTransaction(new Test() + { + @Override + public Void run() + { + //set read and file permissions for folder + filePlanPermissionService.setPermission(folder, myUser, RMPermissionModel.FILING); + return null; + } + }); + doTestInTransaction(new Test() { @Override From 17099d1731aca3186aa832c3fc9d905ea58ac5fb Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Mon, 3 Oct 2016 18:02:20 +0300 Subject: [PATCH 11/28] RM-2368 Initial version record does not inherit document type and aspects from original document --- .../alfresco-global.properties | 7 + .../records-management-service.properties | 3 +- .../rm-version-context.xml | 3 + .../record/RecordServiceImpl.java | 4 +- .../version/RecordableVersionService.java | 61 ++-- .../version/RecordableVersionServiceImpl.java | 267 +++++++++++------- .../version/DeclareAsRecordVersionTest.java | 256 +++++++++++++---- 7 files changed, 420 insertions(+), 181 deletions(-) diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties b/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties index 4c3a2f3d64..8116b69d78 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties @@ -77,3 +77,10 @@ rm.patch.v22.convertToStandardFilePlan=false # a document will be auto-versioned when its type is changed. 
# version.store.enableAutoVersionOnTypeChange=false + +# +# Enable auto-version to be created when there is a difference between the document and latest record state +# to ensure that the created version record matches the current document state, +# otherwise create the version record from the version history +# +rm.enableAutoVersionOnRecordCreation=false diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/messages/records-management-service.properties b/rm-server/config/alfresco/module/org_alfresco_module_rm/messages/records-management-service.properties index f53f066f9d..20a0cf4c62 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/messages/records-management-service.properties +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/messages/records-management-service.properties @@ -18,4 +18,5 @@ rm.service.vital-def-missing=Vital record definition aspect is not present on no rm.service.close-record-folder-not-folder=The record folder couldn't be closed because it's not defined as a record folder.(nodeRef={0}) rm.service.node-has-aspect=The node {0} has already the aspect {1}. rm.service.final-version=Final -rm.service.final-version-description=The final archived record version \ No newline at end of file +rm.service.final-version-description=The final archived record version +rm.service.enable-autoversion-on-record-creation=Auto Version on Record Creation \ No newline at end of file diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-version-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-version-context.xml index 1f755ca57f..4088679ca1 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-version-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-version-context.xml @@ -21,6 +21,7 @@ + @@ -28,6 +29,7 @@ + @@ -77,6 +79,7 @@ * Returns false if not versionable or no version. 
* - * @param nodeRef node reference - * @return boolean true if latest version recorded, false otherwise + * @param nodeRef node reference + * @return boolean true if latest version recorded, false otherwise */ boolean isCurrentVersionRecorded(NodeRef nodeRef); - + /** * Indicates whether a version is recorded or not. * - * @param version version - * @return boolean true if recorded version, false otherwise + * @param version version + * @return boolean true if recorded version, false otherwise */ boolean isRecordedVersion(Version version); - + /** - * If the version is a recorded version, gets the related version - * record. + * If the version is a recorded version, gets the related version record. * - * @param version version - * @return NodeRef node reference of version record + * @param version version + * @return NodeRef node reference of version record */ NodeRef getVersionRecord(Version version); - + /** * Gets the version that relates to the version record * * @param versionRecord version record node reference - * @return Version version or null if not found + * @return Version version or null if not found */ Version getRecordedVersion(NodeRef record); - + /** * Creates a record from the latest version, marking it as recorded. *

- * Does not create a record if the node is not versionable or the latest - * version is already recorded. + * Does not create a record if the node is not versionable or the latest version is already recorded. * - * @param nodeRef node reference - * @return NodeRef node reference to the created record. + * @param nodeRef node reference + * @return NodeRef node reference to the created record. */ NodeRef createRecordFromLatestVersion(NodeRef filePlan, NodeRef nodeRef); - + /** * Indicates whether a record version is destroyed or not. * - * @param version version - * @return boolean true if destroyed, false otherwise + * @param version version + * @return boolean true if destroyed, false otherwise */ boolean isRecordedVersionDestroyed(Version version); - + /** * Marks a recorded version as destroyed. *

- * Note this method does not destroy the associated record, instead it marks the - * version as destroyed. + * Note this method does not destroy the associated record, instead it marks the version as destroyed. * - * @param version version + * @param version version */ void destroyRecordedVersion(Version version); + /** + * Flag that indicate to create new version on record creation if current state of node is modified + * + * @return boolean + */ + public boolean isEnableAutoVersionOnRecordCreation(); + + /** + * Create a snapshot - 'freeze' version of current node + * + * @param nodeRef node reference + * @return version version or null + */ + Version createFreezeVersion(NodeRef nodeRef); } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java index e63d5b696b..b2508898fb 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java @@ -24,6 +24,7 @@ import static org.codehaus.plexus.util.StringUtils.isNotBlank; import java.io.Serializable; import java.util.Collection; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -57,6 +58,7 @@ import org.alfresco.util.PropertyMap; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.surf.util.I18NUtil; /** * Recordable version service implementation @@ -79,6 +81,12 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl protected static final String PROP_VERSION_RECORD = "RecordVersion"; protected static final String PROP_RECORDED_VERSION_DESTROYED = "RecordedVersionDestroyed"; 
+ /** I18N */ + private static final String AUTO_VERSION_ON_RECORD_CREATION = "rm.service.enable-autoversion-on-record-creation"; + + /** flag that enable auto-version on record creation */ + private boolean isEnableAutoVersionOnRecordCreation = false; + /** version aspect property names */ private static final String[] VERSION_PROPERTY_NAMES = new String[] { @@ -113,15 +121,15 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl /** cm object type */ private CmObjectType cmObjectType; - + /** extended permission service */ private ExtendedPermissionService extendedPermissionService; - + /** extended security service */ private ExtendedSecurityService extendedSecurityService; /** - * @param filePlanService file plan service + * @param filePlanService file plan service */ public void setFilePlanService(FilePlanService filePlanService) { @@ -129,7 +137,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @param authenticationUtil authentication util helper + * @param authenticationUtil authentication util helper */ public void setAuthenticationUtil(AuthenticationUtil authenticationUtil) { @@ -137,7 +145,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @param relationshipService relationship service + * @param relationshipService relationship service */ public void setRelationshipService(RelationshipService relationshipService) { @@ -153,7 +161,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @param modelSecurityService model security service + * @param modelSecurityService model security service */ public void setModelSecurityService(ModelSecurityService modelSecurityService) { @@ -167,7 +175,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl { this.cmObjectType = cmObjectType; } - + /** * @param extendedPermissionService extended permission service */ @@ -175,9 +183,9 @@ public class RecordableVersionServiceImpl extends 
Version2ServiceImpl { this.extendedPermissionService = extendedPermissionService; } - + /** - * @param extendedSecurityService extended security service + * @param extendedSecurityService extended security service */ public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { @@ -185,7 +193,21 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @see org.alfresco.repo.version.Version2ServiceImpl#createVersion(org.alfresco.service.cmr.repository.NodeRef, java.util.Map, int) + * @param isEnableAutoVersionOnRecordCreation + */ + public void setEnableAutoVersionOnRecordCreation(boolean isEnableAutoVersionOnRecordCreation) + { + this.isEnableAutoVersionOnRecordCreation = isEnableAutoVersionOnRecordCreation; + } + + public boolean isEnableAutoVersionOnRecordCreation() + { + return isEnableAutoVersionOnRecordCreation; + } + + /** + * @see org.alfresco.repo.version.Version2ServiceImpl#createVersion(org.alfresco.service.cmr.repository.NodeRef, + * java.util.Map, int) */ @Override protected Version createVersion(NodeRef nodeRef, Map origVersionProperties, int versionNumber) throws ReservedVersionNameException @@ -205,7 +227,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl VersionType versionType = null; if (origVersionProperties != null) { - versionType = (VersionType)origVersionProperties.get(VersionModel.PROP_VERSION_TYPE); + versionType = (VersionType) origVersionProperties.get(VersionModel.PROP_VERSION_TYPE); } // determine whether this is a recorded version or not @@ -232,12 +254,12 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @param nodeRef node reference - * @return {@link NodeRef} associated file plan, default if none + * @param nodeRef node reference + * @return {@link NodeRef} associated file plan, default if none */ private NodeRef getFilePlan(NodeRef nodeRef) { - NodeRef filePlan = (NodeRef)nodeService.getProperty(nodeRef, PROP_FILE_PLAN); + 
NodeRef filePlan = (NodeRef) nodeService.getProperty(nodeRef, PROP_FILE_PLAN); if (filePlan == null) { filePlan = getFilePlan(); @@ -246,7 +268,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } /** - * @return {@link NodeRef} default file plan, exception if none + * @return {@link NodeRef} default file plan, exception if none */ private NodeRef getFilePlan() { @@ -276,7 +298,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl boolean result = false; if (nodeService.hasAspect(nodeRef, RecordableVersionModel.ASPECT_VERSIONABLE)) { - String policyString = (String)nodeService.getProperty(nodeRef, PROP_RECORDABLE_VERSION_POLICY); + String policyString = (String) nodeService.getProperty(nodeRef, PROP_RECORDABLE_VERSION_POLICY); if (policyString != null) { RecordableVersionPolicy policy = RecordableVersionPolicy.valueOf(policyString.toUpperCase()); @@ -306,7 +328,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl NodeRef version = null; if (versionProperties.containsKey(KEY_RECORDABLE_VERSION) && - ((Boolean)versionProperties.get(KEY_RECORDABLE_VERSION)).booleanValue()) + ((Boolean)versionProperties.get(KEY_RECORDABLE_VERSION)).booleanValue()) { // create a recorded version version = createNewRecordedVersion(sourceTypeRef, versionHistoryRef, standardVersionProperties, versionProperties, versionNumber, nodeDetails); @@ -323,13 +345,13 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl /** * Creates a new recorded version * - * @param sourceTypeRef source type name - * @param versionHistoryRef version history reference - * @param standardVersionProperties standard version properties - * @param versionProperties version properties - * @param versionNumber version number - * @param nodeDetails policy scope - * @return {@link NodeRef} record version + * @param sourceTypeRef source type name + * @param versionHistoryRef version history reference + * @param standardVersionProperties 
standard version properties + * @param versionProperties version properties + * @param versionNumber version number + * @param nodeDetails policy scope + * @return {@link NodeRef} record version */ protected NodeRef createNewRecordedVersion(QName sourceTypeRef, NodeRef versionHistoryRef, @@ -356,14 +378,14 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl try { // get the destination file plan - final NodeRef filePlan = (NodeRef)versionProperties.get(KEY_FILE_PLAN); + final NodeRef filePlan = (NodeRef) versionProperties.get(KEY_FILE_PLAN); if (filePlan == null) { throw new AlfrescoRuntimeException("Can't create a new recorded version, because no file plan has been specified in the version properties."); } // create a copy of the source node and place in the file plan - final NodeRef nodeRef = (NodeRef)standardVersionProperties.get(Version2Model.PROP_QNAME_FROZEN_NODE_REF); + final NodeRef nodeRef = (NodeRef) standardVersionProperties.get(Version2Model.PROP_QNAME_FROZEN_NODE_REF); cmObjectType.disableCopy(); try @@ -400,7 +422,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl nodeService.addAspect(versionNodeRef, Version2Model.ASPECT_VERSION, standardVersionProperties); // add the recordedVersion aspect with link to record - nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable)record)); + nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable) record)); // freeze auditable aspect information freezeAuditableAspect(nodeRef, versionNodeRef); @@ -441,8 +463,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl /** * Helper method to link the record to the previous version record * - * @param nodeRef noderef source node reference - * @param record record record node reference + * @param nodeRef noderef source node reference + * @param record record record node 
reference */ private void linkToPreviousVersionRecord(final NodeRef nodeRef, final NodeRef record) { @@ -465,8 +487,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl /** * Helper to get the latest version record for a given document (ie non-record) * - * @param nodeRef node reference - * @return NodeRef latest version record, null otherwise + * @param nodeRef node reference + * @return NodeRef latest version record, null otherwise */ private NodeRef getLatestVersionRecord(NodeRef nodeRef) { @@ -480,7 +502,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl for (Version previousVersion : previousVersions) { // look for the associated record - final NodeRef previousRecord = (NodeRef)previousVersion.getVersionProperties().get(PROP_VERSION_RECORD); + final NodeRef previousRecord = (NodeRef) previousVersion.getVersionProperties().get(PROP_VERSION_RECORD); if (previousRecord != null && nodeService.exists(previousRecord)) { @@ -492,7 +514,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl return versionRecord; } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#getRecordedVersion(org.alfresco.service.cmr.repository.NodeRef) */ @@ -545,20 +567,21 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl @Override protected Version getVersion(NodeRef versionRef) { + Version version = super.getVersion(versionRef); // place the version record reference in the version properties - NodeRef record = (NodeRef)dbNodeService.getProperty(versionRef, PROP_RECORD_NODE_REF); + NodeRef record = (NodeRef) dbNodeService.getProperty(versionRef, PROP_RECORD_NODE_REF); if (record != null) - { + { version.getVersionProperties().put(PROP_VERSION_RECORD, record); } // place information about the destruction of the version record in the properties - Boolean destroyed = (Boolean)dbNodeService.getProperty(versionRef, PROP_DESTROYED); + Boolean destroyed = (Boolean) 
dbNodeService.getProperty(versionRef, PROP_DESTROYED); if (destroyed == null) { destroyed = Boolean.FALSE; } version.getVersionProperties().put(PROP_RECORDED_VERSION_DESTROYED, destroyed); - + return version; } @@ -577,22 +600,22 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl dbNodeService.setProperty(nodeRef, PROP_RECORDABLE_VERSION_POLICY, versionPolicy); } } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isLatestVersionRecorded(org.alfresco.service.cmr.repository.NodeRef) */ @Override public boolean isCurrentVersionRecorded(NodeRef nodeRef) { - boolean result = false; + boolean result = false; Version version = getCurrentVersion(nodeRef); if (version != null) { result = isRecordedVersion(version); - } + } return result; } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isRecordedVersion(org.alfresco.service.cmr.version.Version) */ @@ -602,7 +625,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl NodeRef versionNodeRef = getVersionNodeRef(version); return dbNodeService.hasAspect(versionNodeRef, RecordableVersionModel.ASPECT_RECORDED_VERSION); } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#getVersionRecord(org.alfresco.service.cmr.version.Version) */ @@ -614,8 +637,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl if (dbNodeService.hasAspect(versionNodeRef, RecordableVersionModel.ASPECT_RECORDED_VERSION)) { // get the version record - result = (NodeRef)dbNodeService.getProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF); - + result = (NodeRef) dbNodeService.getProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF); + // check that the version record exists if (result != null && !dbNodeService.exists(result)) @@ -627,47 +650,48 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } return result; } - + /** * 
Create Version Store Ref * - * @param store ref - * @return store ref for version store + * @param store ref + * @return store ref for version store */ public StoreRef convertStoreRef(StoreRef storeRef) { return new StoreRef(StoreRef.PROTOCOL_WORKSPACE, storeRef.getIdentifier()); } - + /** * Convert the incomming node ref (with the version store protocol specified) * to the internal representation with the workspace protocol. * - * @param nodeRef the incomming verison protocol node reference - * @return the internal version node reference + * @param nodeRef the incomming verison protocol node reference + * @return the internal version node reference */ public NodeRef convertNodeRef(NodeRef nodeRef) { return new NodeRef(convertStoreRef(nodeRef.getStoreRef()), nodeRef.getId()); } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#createRecordFromLatestVersion(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef) */ @Override public NodeRef createRecordFromLatestVersion(final NodeRef filePlan, final NodeRef nodeRef) - { + { ParameterCheck.mandatory("filePlan", filePlan); ParameterCheck.mandatory("nodeRef", nodeRef); - + NodeRef record = null; - + // check for versionable aspect if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE)) { + createFreezeVersion(nodeRef); // get the latest version final Version currentVersion = getCurrentVersion(nodeRef); - + if (currentVersion != null && !isRecordedVersion(currentVersion)) { @@ -678,19 +702,19 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl { // get the documents readers and writers Pair, Set> readersAndWriters = extendedPermissionService.getReadersAndWriters(nodeRef); - + // grab the frozen state NodeRef currentFrozenState = currentVersion.getFrozenStateNodeRef(); - + // determine the type of the object QName type = nodeService.getType(currentFrozenState); - + // grab all the properties Map properties = 
nodeService.getProperties(currentFrozenState); - + // grab all the aspects Set aspects = nodeService.getAspects(currentFrozenState); - + // create the record NodeRef record = recordService.createRecordFromContent( filePlan, @@ -698,14 +722,14 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl type, properties, null); - + // apply aspects to record for (QName aspect : aspects) { // add the aspect, properties have already been set nodeService.addAspect(record, aspect, null); } - + // apply version record aspect to record PropertyMap versionRecordProps = new PropertyMap(3); versionRecordProps.put(PROP_VERSIONED_NODEREF, nodeRef); @@ -715,38 +739,38 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl // wire record up to previous record linkToPreviousVersionRecord(nodeRef, record); - + // set the extended security extendedSecurityService.set(record, readersAndWriters); - + return record; } }); - + // get the version history NodeRef versionHistoryRef = getVersionHistoryNodeRef(nodeRef); - + // get details from the version before we remove it int versionNumber = getVersionNumber(currentVersion); Map versionProperties = getVersionAspectProperties(currentVersion); QName sourceTypeRef = getVersionType(currentVersion); - + // patch-up owner information, which needs to be frozen for recorded versions - String owner = (String)nodeService.getProperty(currentVersion.getFrozenStateNodeRef(), ContentModel.PROP_OWNER); + String owner = (String) nodeService.getProperty(currentVersion.getFrozenStateNodeRef(), ContentModel.PROP_OWNER); if (owner != null) { versionProperties.put(PROP_FROZEN_OWNER, owner); } - + // delete the current version this.dbNodeService.deleteNode(convertNodeRef(currentVersion.getFrozenStateNodeRef())); - + // create a new version history if we need to if (!nodeService.exists(versionHistoryRef)) { versionHistoryRef = createVersionHistory(nodeRef); } - + // create recorded version nodeRef ChildAssociationRef childAssocRef 
= dbNodeService.createNode( versionHistoryRef, @@ -760,34 +784,34 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl nodeService.addAspect(versionNodeRef, Version2Model.ASPECT_VERSION, versionProperties); // add the recordedVersion aspect with link to record - nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable)record)); + nodeService.addAspect(versionNodeRef, ASPECT_RECORDED_VERSION, Collections.singletonMap(PROP_RECORD_NODE_REF, (Serializable) record)); } } - + return record; } - + /** - * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isRecordedVersionDestroyed(org.alfresco.service.cmr.version.Version) + * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#isRecordedVersionDestroyed(org.alfresco.service.cmr.version.Version) */ @Override public boolean isRecordedVersionDestroyed(Version version) { boolean result = false; - + // get the version node reference - NodeRef versionNodeRef = getVersionNodeRef(version); + NodeRef versionNodeRef = getVersionNodeRef(version); // get the destroyed property value - Boolean isDestroyed = (Boolean)dbNodeService.getProperty(versionNodeRef, PROP_DESTROYED); + Boolean isDestroyed = (Boolean) dbNodeService.getProperty(versionNodeRef, PROP_DESTROYED); if (isDestroyed != null) { result = isDestroyed.booleanValue(); } - + return result; } - + /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#destroyRecordedVersion(org.alfresco.service.cmr.version.Version) */ @@ -796,24 +820,24 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl { // get the version node reference NodeRef versionNodeRef = getVersionNodeRef(version); - + // if it's a recorded version if (dbNodeService.hasAspect(versionNodeRef, ASPECT_RECORDED_VERSION)) { // mark it as destroyed dbNodeService.setProperty(versionNodeRef, PROP_DESTROYED, true); - + // clear 
the record node reference property - dbNodeService.setProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF, null); - } + dbNodeService.setProperty(versionNodeRef, RecordableVersionModel.PROP_RECORD_NODE_REF, null); + } } - + /** * Helper method to get the version number of a given version by inspecting the * name of the parent association. * - * @param version version - * @return int version number + * @param version version + * @return int version number */ private int getVersionNumber(Version version) { @@ -823,11 +847,11 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl String versionNumber = fullVersionNumber.substring(fullVersionNumber.indexOf("-") + 1); return Integer.parseInt(versionNumber); } - + /** * Helper method to get all the version aspect properties from an existing version * - * @param version version + * @param version version * @return Map property values */ private Map getVersionAspectProperties(Version version) @@ -839,7 +863,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl { QName propertyQName = QName.createQName(Version2Model.NAMESPACE_URI, propertyName); result.put(propertyQName, versionProps.get(propertyQName)); - + if (propertyName.equals(Version2Model.PROP_FROZEN_NODE_DBID)) { System.out.println(versionProps.get(propertyQName)); @@ -847,7 +871,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl } return result; } - + /** * Helper method to get the type of a versions frozen state * @@ -857,16 +881,67 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl private QName getVersionType(Version version) { return nodeService.getType(getVersionNodeRef(version)); - } - + } + /** * Helper method to get the internal node reference of a version * - * @param version version - * @return NodeRef internal node reference to version + * @param version version + * @return NodeRef internal node reference to version */ private NodeRef 
getVersionNodeRef(Version version) { - return convertNodeRef(version.getFrozenStateNodeRef()); + return convertNodeRef(version.getFrozenStateNodeRef()); + } + + /** + * Check if current version of the node is modified compared with versioned version + * + * @param nodeRef internal node reference + * @return boolean true if nodeRef is modified, otherwise false + */ + public boolean isCurrentVersionDirty(NodeRef nodeRef) + { + + if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true) + { + // get the latest version + Version currentVersion = getCurrentVersion(nodeRef); + Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED); + if (currentVersion != null) + { + // grab the frozen state + NodeRef currentFrozenState = currentVersion.getFrozenStateNodeRef(); + Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED); + if (frozenModificationDate != null) + { + if (modificationDate.getTime() > frozenModificationDate.getTime()) { return true; } + } + } + else + { + return true; + } + + } + return false; + } + + /** + * @see RecordableVersionService#createFreezeVersion(NodeRef) + */ + public Version createFreezeVersion(NodeRef nodeRef) + { + Version newVersion = null; + boolean autoVersion = isEnableAutoVersionOnRecordCreation(); + // if the flag autoversion on record creation set, create new version on dirty nodes + if (autoVersion && isCurrentVersionDirty(nodeRef)) + { + Map autoVersionProperties = new HashMap(2); + autoVersionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR); + autoVersionProperties.put(VersionModel.PROP_DESCRIPTION, I18NUtil.getMessage(AUTO_VERSION_ON_RECORD_CREATION)); + newVersion = createVersion(nodeRef, autoVersionProperties); + } + return newVersion; } } diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java 
b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index b305a1801c..69bbb27879 100755 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -19,20 +19,23 @@ package org.alfresco.module.org_alfresco_module_rm.test.integration.version; import java.io.Serializable; +import java.util.Date; import java.util.HashMap; import java.util.Map; +import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService; import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionServiceImpl; import org.alfresco.repo.version.VersionModel; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.version.Version; import org.alfresco.service.cmr.version.VersionType; +import org.alfresco.service.namespace.QName; import org.alfresco.util.GUID; /** * Declare as record version integration tests - * + * * @author Roy Wetherall * @since 2.3 */ @@ -40,7 +43,7 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest { /** recordable version service */ private RecordableVersionService recordableVersionService; - + /** * @see org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase#initServices() */ @@ -48,69 +51,66 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest protected void initServices() { super.initServices(); - recordableVersionService = (RecordableVersionService)applicationContext.getBean("RecordableVersionService"); + recordableVersionService = (RecordableVersionService) applicationContext.getBean("RecordableVersionService"); } - + /** - * Given versionable content with a non-recorded latest version - * When I declare a version record - * Then the latest version is 
recorded and a record is created + * Given versionable content with a non-recorded latest version When I declare a version record Then the latest + * version is recorded and a record is created */ public void testDeclareLatestVersionAsRecord() { doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) - { + { private NodeRef versionRecord; - private Map versionProperties; - + private Map versionProperties; + public void given() throws Exception { // setup version properties versionProperties = new HashMap(4); versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION); versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR); - + // create version versionService.createVersion(dmDocument, versionProperties); - + // assert that the latest version is not recorded assertFalse(recordableVersionService.isCurrentVersionRecorded(dmDocument)); } - + public void when() - { + { // create version record from latest version versionRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, dmDocument); - } - + } + public void then() { // check the created record assertNotNull(versionRecord); assertTrue(recordService.isRecord(versionRecord)); - + // assert the current version is recorded assertTrue(recordableVersionService.isCurrentVersionRecorded(dmDocument)); - + // check the recorded version checkRecordedVersion(dmDocument, DESCRIPTION, "0.1"); } - }); - } - + }); + } + /** - * Given versionable content with a recorded latest version - * When I declare a version record - * Then nothing happens since the latest version is already recorded - * And a warning is logged + * Given versionable content with a recorded latest version When I declare a version record Then nothing happens + * since the latest version is already recorded And a warning is logged */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() { doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) - { + { private NodeRef versionRecord; - private Map 
versionProperties; - + private Map versionProperties; + public void given() throws Exception { // setup version properties @@ -119,90 +119,230 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR); versionProperties.put(RecordableVersionServiceImpl.KEY_RECORDABLE_VERSION, true); versionProperties.put(RecordableVersionServiceImpl.KEY_FILE_PLAN, filePlan); - + // create version versionService.createVersion(dmDocument, versionProperties); - + // assert that the latest version is not recorded assertTrue(recordableVersionService.isCurrentVersionRecorded(dmDocument)); } - + public void when() - { + { // create version record from latest version versionRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, dmDocument); - } - + } + public void then() { // check that a record was not created assertNull(versionRecord); - - // assert the current version is recorded + + // assert the current version is recorded assertTrue(recordableVersionService.isCurrentVersionRecorded(dmDocument)); - + // check the recorded version checkRecordedVersion(dmDocument, DESCRIPTION, "0.1"); } - }); + }); } /** - * Given that a document is a specialized type - * When version is declared as a record - * Then the record is the same type as the source document + * Given that a document is a specialized type When version is declared as a record Then the record is the same type + * as the source document * * @see https://issues.alfresco.com/jira/browse/RM-2194 */ public void testSpecializedContentType() { doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) - { + { private NodeRef customDocument; private NodeRef versionRecord; - private Map versionProperties; - + private Map versionProperties; + public void given() throws Exception { - // create content + // create content customDocument = fileFolderService.create(dmFolder, GUID.generate(), TYPE_CUSTOM_TYPE).getNodeRef(); 
prepareContent(customDocument); - + // setup version properties versionProperties = new HashMap(2); versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION); versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR); - + // create version versionService.createVersion(customDocument, versionProperties); - + // assert that the latest version is not recorded assertFalse(recordableVersionService.isCurrentVersionRecorded(customDocument)); } - + public void when() - { + { // create version record from latest version versionRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, customDocument); - } - + } + public void then() { // check the created record assertNotNull(versionRecord); assertTrue(recordService.isRecord(versionRecord)); - + // check the record type is correct assertEquals(TYPE_CUSTOM_TYPE, nodeService.getType(versionRecord)); - + // assert the current version is recorded assertTrue(recordableVersionService.isCurrentVersionRecorded(customDocument)); - + // check the recorded version checkRecordedVersion(customDocument, DESCRIPTION, "0.1"); } - }); - + }); + } - + + /** + * @see https://issues.alfresco.com/jira/browse/RM-2368 + */ + public void testCreateRecordFromLatestVersion() + { + doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) + { + private NodeRef myDocument; + private NodeRef versionedRecord; + private Map versionProperties; + private Date createdDate; + private Date frozenModifDate; + private Date modificationDate; + private String record_name = "initial_name"; + private String AUTO_VERSION_DESCRIPTION = "Auto Version on Record Creation"; + private boolean autoVersion = false; + + public void given() throws Exception + { + // create a document + myDocument = fileFolderService.create(dmFolder, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef(); + createdDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_CREATED); + modificationDate = (Date) nodeService.getProperty(myDocument, 
ContentModel.PROP_MODIFIED); + assertTrue("Modified date must be after or on creation date", createdDate.getTime() == modificationDate.getTime()); + + // Set initial set of properties + Map properties = new HashMap(3); + // Ensure default behaviour autoversion on change properties is set to false + properties.put(ContentModel.PROP_AUTO_VERSION_PROPS, false); + // Set initial name + properties.put(ContentModel.PROP_NAME, "initial_name"); + nodeService.setProperties(myDocument, properties); + nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); + nodeService.addAspect(myDocument, ContentModel.ASPECT_OWNABLE, null); + // make sure document is versionable + nodeService.addAspect(myDocument, ContentModel.ASPECT_VERSIONABLE, null); + // Change Type to a custom document + nodeService.setType(myDocument, TYPE_CUSTOM_TYPE); + + Date modificationDate1 = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_MODIFIED); + assertTrue("Frozen modification date", modificationDate.getTime() == modificationDate1.getTime()); + // setup version properties + versionProperties = new HashMap(2); + versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION); + versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR); + + // create initial version + versionService.createVersion(myDocument, versionProperties); + Version version = versionService.getCurrentVersion(myDocument); + frozenModifDate = version.getFrozenModifiedDate(); + + // get autoversion flag from cofiguratie + autoVersion = recordableVersionService.isEnableAutoVersionOnRecordCreation(); + } + + public void when() + { + // check1 + assertTrue("Frozen modification date is the same with initial document ", modificationDate.getTime() == frozenModifDate.getTime()); + // current node is not dirty + assertFalse(isCurrentVersionDirty(myDocument)); + + if (autoVersion) + { + // Apply a custom aspect + nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null); + // Update 
properties + nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "updated_name"); + nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); + // node should be modified + assertTrue(isCurrentVersionDirty(myDocument)); + } + else + { + assertFalse(isCurrentVersionDirty(myDocument)); + } + + assertFalse(recordableVersionService.isCurrentVersionRecorded(myDocument)); + // test RM-2368 + versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument); + + } + + public void then() + { + // Properties updated / flag as modified + // check the created record + assertNotNull(versionedRecord); + assertTrue(recordService.isRecord(versionedRecord)); + + // check the record type is correct + assertEquals(TYPE_CUSTOM_TYPE, nodeService.getType(versionedRecord)); + + // assert the current version is recorded + assertTrue(recordableVersionService.isCurrentVersionRecorded(myDocument)); + + // get name of record + record_name = (String) nodeService.getProperty(versionedRecord, ContentModel.PROP_NAME); + + if (autoVersion) + { + // new version is create, current node was modified + assertTrue("Name was updated:", record_name.contains("updated_name")); + // check record + checkRecordedVersion(myDocument, AUTO_VERSION_DESCRIPTION, "1.1"); + } + else + { + // record is created based on existing frozen, which does not contain any modification of node + assertTrue("Name is not modified: ", record_name.contains("initial_name")); + checkRecordedVersion(myDocument, DESCRIPTION, "1.0"); + } + + } + + public boolean isCurrentVersionDirty(NodeRef nodeRef) + { + if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true) + { + // get the latest version + Version currentVersion = versionService.getCurrentVersion(nodeRef); + Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED); + if (currentVersion != null) + { + // grab the frozen state + NodeRef currentFrozenState = 
currentVersion.getFrozenStateNodeRef(); + Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED); + if (modificationDate.getTime() > frozenModificationDate.getTime()) { return true; } + } + else + { + return true; + } + } + return false; + } + + }); + + } + } From db0f5a2b4525483f23730d15ca67435ab2eca6b9 Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Tue, 4 Oct 2016 14:22:59 +0300 Subject: [PATCH 12/28] Re-format acceptance criteria comments --- .../version/DeclareAsRecordVersionTest.java | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) mode change 100755 => 100644 rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java old mode 100755 new mode 100644 index 69bbb27879..db99d11803 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -55,8 +55,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given versionable content with a non-recorded latest version When I declare a version record Then the latest - * version is recorded and a record is created + * Given versionable content with a non-recorded latest version + * When I declare a version record + * Then the latest version is recorded and a record is created */ public void testDeclareLatestVersionAsRecord() { @@ -101,8 +102,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given versionable content with a recorded latest version When I declare a 
version record Then nothing happens - * since the latest version is already recorded And a warning is logged + * Given versionable content with a recorded latest version + * When I declare a version record + * Then nothing happens since the latest version is already recorded And a warning is logged */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() { @@ -148,8 +150,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given that a document is a specialized type When version is declared as a record Then the record is the same type - * as the source document + * Given that a document is a specialized type + * When version is declared as a record + * Then the record is the same type as the source document * * @see https://issues.alfresco.com/jira/browse/RM-2194 */ @@ -205,6 +208,11 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** + * + * Given versionable content with a recorded latest version with auto-version set on true + * When I declare this version record + * Then a new minor version is created for document + * * @see https://issues.alfresco.com/jira/browse/RM-2368 */ public void testCreateRecordFromLatestVersion() @@ -261,8 +269,6 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest public void when() { - // check1 - assertTrue("Frozen modification date is the same with initial document ", modificationDate.getTime() == frozenModifDate.getTime()); // current node is not dirty assertFalse(isCurrentVersionDirty(myDocument)); @@ -280,8 +286,6 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest { assertFalse(isCurrentVersionDirty(myDocument)); } - - assertFalse(recordableVersionService.isCurrentVersionRecorded(myDocument)); // test RM-2368 versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument); From cf602aa9e18bf5b416e8fff20bac9ee11de6bccc Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: 
Tue, 4 Oct 2016 14:27:03 +0300 Subject: [PATCH 13/28] Re-format acceptance criteria comments --- .../test/integration/version/DeclareAsRecordVersionTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index db99d11803..3e2d83f0f6 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -320,7 +320,6 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest assertTrue("Name is not modified: ", record_name.contains("initial_name")); checkRecordedVersion(myDocument, DESCRIPTION, "1.0"); } - } public boolean isCurrentVersionDirty(NodeRef nodeRef) From b46709d75a304048b22ab21831bdfcb10bb71ace Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Tue, 4 Oct 2016 14:34:07 +0300 Subject: [PATCH 14/28] Re-format acceptance criteria comments --- .../version/DeclareAsRecordVersionTest.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index 69bbb27879..d735a2b31c 100755 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -101,8 +101,10 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given versionable 
content with a recorded latest version When I declare a version record Then nothing happens - * since the latest version is already recorded And a warning is logged + * Given versionable content with a recorded latest version + * When I declare a version record + * Then nothing happens since the latest version is already recorded And a warning is logged + * */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() { @@ -148,8 +150,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given that a document is a specialized type When version is declared as a record Then the record is the same type - * as the source document + * Given that a document is a specialized type + * When version is declared as a record + * Then the record is the same type as the source document * * @see https://issues.alfresco.com/jira/browse/RM-2194 */ @@ -205,6 +208,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** + * Given versionable content with a non recorded latest version, with auto-version flag true , and properties are changed for current version + * When I declare this version as a record + * Then a new minor version is created * @see https://issues.alfresco.com/jira/browse/RM-2368 */ public void testCreateRecordFromLatestVersion() From ef1c5a2cfde71a632781d0e8edb63f32e494fe83 Mon Sep 17 00:00:00 2001 From: Tom Page Date: Mon, 3 Oct 2016 14:30:49 +0100 Subject: [PATCH 15/28] RM-3386 Refactor duplicated disposition date code into method. 
--- .../disposition/DispositionServiceImpl.java | 72 +++---------------- 1 file changed, 9 insertions(+), 63 deletions(-) diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java index 70f0a43957..4b3866bb3d 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java @@ -217,7 +217,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl DispositionActionDefinition nextDispositionActionDefinition = dispositionActionDefinitions.get(0); // initialise the details of the next disposition action - initialiseDispositionAction(nodeRef, nextDispositionActionDefinition); + initialiseDispositionAction(nodeRef, nextDispositionActionDefinition, true); } } } @@ -618,10 +618,11 @@ public class DispositionServiceImpl extends ServiceBaseImpl * Initialises the details of the next disposition action based on the details of a disposition * action definition. * - * @param nodeRef node reference - * @param dispositionActionDefinition disposition action definition + * @param nodeRef node reference + * @param dispositionActionDefinition disposition action definition + * @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property. 
*/ - private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition) + private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf) { // Create the properties Map props = new HashMap(10); @@ -635,7 +636,8 @@ public class DispositionServiceImpl extends ServiceBaseImpl // Get the period properties value QName periodProperty = dispositionActionDefinition.getPeriodProperty(); - if (periodProperty != null) + if (periodProperty != null && (allowContextFromAsOf + || !RecordsManagementModel.PROP_DISPOSITION_AS_OF.equals(periodProperty))) { // doesn't matter if the period property isn't set ... the asOfDate will get updated later // when the value of the period property is set @@ -898,63 +900,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl nodeService.addAspect(nodeRef, ASPECT_DISPOSITION_LIFECYCLE, null); } - // Create the properties - Map props = new HashMap(10); - - // Calculate the asOf date - Date asOfDate = null; - Period period = nextDispositionActionDefinition.getPeriod(); - if (period != null) - { - Date contextDate = null; - - // Get the period properties value - QName periodProperty = nextDispositionActionDefinition.getPeriodProperty(); - if (periodProperty != null && - !RecordsManagementModel.PROP_DISPOSITION_AS_OF.equals(periodProperty)) - { - // doesn't matter if the period property isn't set ... the asOfDate will get updated later - // when the value of the period property is set - contextDate = (Date) nodeService.getProperty(nodeRef, periodProperty); - } - else - { - // for now use 'NOW' as the default context date - // TODO set the default period property ... 
cut off date or last disposition date depending on context - contextDate = new Date(); - } - - // Calculate the as of date - if (contextDate != null) - { - asOfDate = period.getNextDate(contextDate); - } - } - - // Set the property values - props.put(PROP_DISPOSITION_ACTION_ID, nextDispositionActionDefinition.getId()); - props.put(PROP_DISPOSITION_ACTION, nextDispositionActionDefinition.getName()); - if (asOfDate != null) - { - props.put(PROP_DISPOSITION_AS_OF, asOfDate); - } - - // Create a new disposition action object - NodeRef dispositionActionNodeRef = nodeService.createNode( - nodeRef, - ASSOC_NEXT_DISPOSITION_ACTION, - ASSOC_NEXT_DISPOSITION_ACTION, - TYPE_DISPOSITION_ACTION, - props).getChildRef(); - DispositionAction da = new DispositionActionImpl(serviceRegistry, dispositionActionNodeRef); - - // Create the events - List events = nextDispositionActionDefinition.getEvents(); - for (RecordsManagementEvent event : events) - { - // For every event create an entry on the action - da.addEventCompletionDetails(event); - } + initialiseDispositionAction(nodeRef, nextDispositionActionDefinition, false); } } @@ -1009,7 +955,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl recordFolderService.closeRecordFolder(nodeRef); return null; } - }); + }); } } else From 726c2284b8234cbc92c56fecd93c045c6d4ef976 Mon Sep 17 00:00:00 2001 From: Tom Page Date: Mon, 3 Oct 2016 14:36:31 +0100 Subject: [PATCH 16/28] RM-3386 Replace apache logger with slf4j. Use slf4j to remove the need for a log level check. Note that the constant strings will be concatenated at compile time so we don't have to worry about the performance issue here. 
--- .../disposition/DispositionServiceImpl.java | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java index 4b3866bb3d..5022bfd476 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java @@ -53,8 +53,8 @@ import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.QName; import org.alfresco.service.namespace.RegexQNamePattern; import org.alfresco.util.ParameterCheck; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Disposition service implementation. @@ -68,7 +68,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl RecordsManagementPolicies.OnFileRecord { /** Logger */ - private static Log logger = LogFactory.getLog(DispositionServiceImpl.class); + Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class); /** Behaviour filter */ private BehaviourFilter behaviourFilter; @@ -387,12 +387,10 @@ public class DispositionServiceImpl extends ServiceBaseImpl { // TODO in the future we should be able to support disposition schedule reuse, but for now just warn that // only the first disposition schedule will be considered - if (logger.isWarnEnabled()) - { - logger.warn("Disposition schedule has more than one associated records management container. " + - "This is not currently supported so only the first container will be considered. 
" + - "(dispositionScheduleNodeRef=" + dispositionSchedule.getNodeRef().toString() + ")"); - } + LOGGER.warn("Disposition schedule has more than one associated records management container. " + + "This is not currently supported so only the first container will be considered. " + + "(dispositionScheduleNodeRef={})", + dispositionSchedule.getNodeRef().toString()); } // Get the container reference From 70eb0cb3990fcaa5bdf082daa3159a3bfa1ee6ab Mon Sep 17 00:00:00 2001 From: Tom Page Date: Tue, 4 Oct 2016 12:52:34 +0100 Subject: [PATCH 17/28] RM-3386 Centralise the logic for recalculating "disposition as of" dates. --- .../rm-service-context.xml | 1 + ...spositionActionDefinitionUpdateAction.java | 12 +--- .../disposition/DispositionService.java | 12 ++++ .../disposition/DispositionServiceImpl.java | 66 ++++++++++++------- 4 files changed, 57 insertions(+), 34 deletions(-) diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml index 0ed6c73149..271b40b23c 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml @@ -154,6 +154,7 @@ org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getLastCompletedDispostionAction=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.isDisposableItemCutoff=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.cutoffDisposableItem=RM.Read.0 + org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.calculateAsOfDate=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.*=RM_DENY ]]> diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java 
b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java index 7dc22f9b65..d4de92dd51 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java @@ -239,15 +239,9 @@ public class BroadcastDispositionActionDefinitionUpdateAction extends RMActionEx */ private void persistPeriodChanges(NodeRef dispositionActionDef, DispositionAction nextAction) { - Date newAsOfDate = null; - Period dispositionPeriod = (Period) getNodeService().getProperty(dispositionActionDef, PROP_DISPOSITION_PERIOD); - - if (dispositionPeriod != null) - { - // calculate the new as of date as we have been provided a new period - Date now = new Date(); - newAsOfDate = dispositionPeriod.getNextDate(now); - } + NodeRef dispositionedNode = getNodeService().getPrimaryParent(nextAction.getNodeRef()).getParentRef(); + DispositionActionDefinition definition = nextAction.getDispositionActionDefinition(); + Date newAsOfDate = getDispositionService().calculateAsOfDate(dispositionedNode, definition, false); if (logger.isDebugEnabled()) { diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java index 295397f1bd..734cb286ec 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java @@ -20,6 +20,7 @@ package org.alfresco.module.org_alfresco_module_rm.disposition; import java.io.Serializable; import java.util.Collection; +import java.util.Date; import java.util.List; import java.util.Map; @@ -232,4 +233,15 
@@ public interface DispositionService * @param nodeRef node reference */ void refreshDispositionAction(NodeRef nodeRef); + + /** + * Compute the "disposition as of" date (if necessary) for a disposition action and a node. + * + * @param nodeRef The node which the schedule applies to. + * @param dispositionActionDefinition The definition of the disposition action. + * @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property. + * @return The new "disposition as of" date. + */ + Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, + boolean allowContextFromAsOf); } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java index 5022bfd476..05cb50ba52 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java @@ -625,6 +625,46 @@ public class DispositionServiceImpl extends ServiceBaseImpl // Create the properties Map props = new HashMap(10); + Date asOfDate = calculateAsOfDate(nodeRef, dispositionActionDefinition, allowContextFromAsOf); + + // Set the property values + props.put(PROP_DISPOSITION_ACTION_ID, dispositionActionDefinition.getId()); + props.put(PROP_DISPOSITION_ACTION, dispositionActionDefinition.getName()); + if (asOfDate != null) + { + props.put(PROP_DISPOSITION_AS_OF, asOfDate); + } + + // Create a new disposition action object + NodeRef dispositionActionNodeRef = this.nodeService.createNode( + nodeRef, + ASSOC_NEXT_DISPOSITION_ACTION, + ASSOC_NEXT_DISPOSITION_ACTION, + TYPE_DISPOSITION_ACTION, + props).getChildRef(); + DispositionAction da = new DispositionActionImpl(serviceRegistry, dispositionActionNodeRef); + + // 
Create the events + List events = dispositionActionDefinition.getEvents(); + for (RecordsManagementEvent event : events) + { + // For every event create an entry on the action + da.addEventCompletionDetails(event); + } + } + + /** + * Compute the "disposition as of" date (if necessary) for a disposition action and a node. + * + * @param nodeRef The node which the schedule applies to. + * @param dispositionActionDefinition The definition of the disposition action. + * @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property. + * @return The new "disposition as of" date. + */ + @Override + public Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, + boolean allowContextFromAsOf) + { // Calculate the asOf date Date asOfDate = null; Period period = dispositionActionDefinition.getPeriod(); @@ -654,31 +694,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl asOfDate = period.getNextDate(contextDate); } } - - // Set the property values - props.put(PROP_DISPOSITION_ACTION_ID, dispositionActionDefinition.getId()); - props.put(PROP_DISPOSITION_ACTION, dispositionActionDefinition.getName()); - if (asOfDate != null) - { - props.put(PROP_DISPOSITION_AS_OF, asOfDate); - } - - // Create a new disposition action object - NodeRef dispositionActionNodeRef = this.nodeService.createNode( - nodeRef, - ASSOC_NEXT_DISPOSITION_ACTION, - ASSOC_NEXT_DISPOSITION_ACTION, - TYPE_DISPOSITION_ACTION, - props).getChildRef(); - DispositionAction da = new DispositionActionImpl(serviceRegistry, dispositionActionNodeRef); - - // Create the events - List events = dispositionActionDefinition.getEvents(); - for (RecordsManagementEvent event : events) - { - // For every event create an entry on the action - da.addEventCompletionDetails(event); - } + return asOfDate; } /** From 2db97307024db05887bbfd3076bfd2ad8215f9bd Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Mon, 3 Oct 2016 17:02:06 
+0100 Subject: [PATCH 18/28] Merge branch 'feature-2.3/RM-4162_MergeToV2.3' into release/V2.3 # Conflicts: # rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml # rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java # rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java # rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java --- .../rm-action-context.xml | 11 +- .../rm-webscript-context.xml | 2 + .../roles/rm-dynamicauthorities.get.desc.xml | 6 +- .../impl/CopyMoveLinkFileToBaseAction.java | 75 +-- .../action/impl/DestroyAction.java | 21 +- .../scripts/roles/DynamicAuthoritiesGet.java | 481 +++++++++++++++--- .../test/integration/issue/RM3993Test.java | 19 - .../test/integration/issue/RM4101Test.java | 129 +++++ .../record/InplaceRecordPermissionTest.java | 3 +- .../test/util/BaseWebScriptUnitTest.java | 8 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 406 +++++++++++++-- 11 files changed, 960 insertions(+), 201 deletions(-) create mode 100644 rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java diff --git a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml index 4142e18c11..0191e398ba 100644 --- a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml +++ b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml @@ -106,11 +106,11 @@ - + - + @@ -261,6 +261,7 @@ + ${rm.ghosting.enabled} @@ -271,7 +272,7 @@ - + @@ -762,7 +763,6 @@ - @@ -791,7 +791,6 @@ - @@ -820,7 +819,6 @@ - @@ -849,7 +847,6 @@ - diff --git 
a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml index fedd555546..d4f8b73a05 100644 --- a/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml +++ b/rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-webscript-context.xml @@ -598,6 +598,8 @@ + + diff --git a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 00a2cad9e1..6cf7174dff 100644 --- a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -2,11 +2,13 @@ Removes dynamic authorities - URL parameter batchsize is mandatory, and represents the number of records that are processed in one transaction.
+ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
+ URL parameter export is optional; if its value is true, the processed records will be exported into a csv file.<br/>
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
]]>
- /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java index 468d0a709f..c2c896afbd 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/CopyMoveLinkFileToBaseAction.java @@ -44,12 +44,10 @@ import org.alfresco.service.cmr.action.ParameterDefinition; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileNotFoundException; -import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.dao.ConcurrencyFailureException; import org.springframework.util.StringUtils; /** @@ -62,9 +60,6 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr { private static Log logger = LogFactory.getLog(CopyMoveLinkFileToBaseAction.class); - /** Retrying transaction helper */ - private RetryingTransactionHelper retryingTransactionHelper; - /** action parameters */ public static final String PARAM_DESTINATION_RECORD_FOLDER = "destinationRecordFolder"; public static final String PARAM_PATH = "path"; @@ -121,14 +116,6 @@ public abstract class 
CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr this.filePlanService = filePlanService; } - /** - * @param retryingTransactionHelper retrying transaction helper - */ - public void setRetryingTransactionHelper(RetryingTransactionHelper retryingTransactionHelper) - { - this.retryingTransactionHelper = retryingTransactionHelper; - } - /** * @see org.alfresco.module.org_alfresco_module_rm.action.RMActionExecuterAbstractBase#addParameterDefinitions(java.util.List) */ @@ -165,25 +152,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr NodeRef recordFolder = (NodeRef)action.getParameterValue(PARAM_DESTINATION_RECORD_FOLDER); if (recordFolder == null) { - final boolean finaltargetIsUnfiledRecords = targetIsUnfiledRecords; - recordFolder = retryingTransactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() - { - public NodeRef execute() throws Throwable - { - NodeRef result = null; - try - { - // get the reference to the record folder based on the relative path - result = createOrResolvePath(action, actionedUponNodeRef, finaltargetIsUnfiledRecords); - } - catch (DuplicateChildNodeNameException ex) - { - throw new ConcurrencyFailureException("Cannot create or resolve path.", ex); - } - - return result; - } - }, false, true); + recordFolder = createOrResolvePath(action, actionedUponNodeRef, targetIsUnfiledRecords); } // now we have the reference to the target folder we can do some final checks to see if the action is valid @@ -197,30 +166,26 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr { try { - synchronized (this) + if (getMode() == CopyMoveLinkFileToActionMode.MOVE) { - if (getMode() == CopyMoveLinkFileToActionMode.MOVE) - { - fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null); - } - else if (getMode() == CopyMoveLinkFileToActionMode.COPY) - { - fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null); - } - else if (getMode() 
== CopyMoveLinkFileToActionMode.LINK) - { - getRecordService().link(actionedUponNodeRef, finalRecordFolder); - } + fileFolderService.move(actionedUponNodeRef, finalRecordFolder, null); + } + else if (getMode() == CopyMoveLinkFileToActionMode.COPY) + { + fileFolderService.copy(actionedUponNodeRef, finalRecordFolder, null); + } + else if (getMode() == CopyMoveLinkFileToActionMode.LINK) + { + getRecordService().link(actionedUponNodeRef, finalRecordFolder); } } catch (FileNotFoundException fileNotFound) { throw new AlfrescoRuntimeException("Unable to execute file to action, because the " + (mode == CopyMoveLinkFileToActionMode.MOVE ? "move" : "copy") + " operation failed.", fileNotFound); } - + return null; } - }); } } @@ -326,21 +291,11 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr final boolean create = createValue == null ? false : createValue.booleanValue(); // create or resolve the specified path - path = retryingTransactionHelper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() + path = getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback() { public NodeRef execute() throws Throwable { - NodeRef path = null; - try - { - path = createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, - create, false); - } - catch (DuplicateChildNodeNameException ex) - { - throw new ConcurrencyFailureException("Cannot create or resolve path.", ex); - } - return path; + return createOrResolvePath(action, context, actionedUponNodeRef, Arrays.asList(pathElementsArray), targetisUnfiledRecords, create, false); } }, false, true); } @@ -431,7 +386,7 @@ public abstract class CopyMoveLinkFileToBaseAction extends RMActionExecuterAbstr NodeRef child = getChild(parent, childName); if (child == null) { - if(targetisUnfiledRecords) + if (targetisUnfiledRecords) { // create unfiled folder child = 
fileFolderService.create(parent, childName, RecordsManagementModel.TYPE_UNFILED_RECORD_FOLDER).getNodeRef(); diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java index 7c41598d95..17448a091e 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/DestroyAction.java @@ -36,6 +36,7 @@ import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService; import org.alfresco.module.org_alfresco_module_rm.content.ContentDestructionComponent; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.record.InplaceRecordService; import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService; import org.alfresco.service.cmr.action.Action; import org.alfresco.service.cmr.repository.NodeRef; @@ -59,10 +60,13 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase /** Capability service */ private CapabilityService capabilityService; - + /** Recordable version service */ private RecordableVersionService recordableVersionService; + /** Inplace record service */ + private InplaceRecordService inplaceRecordService; + /** Indicates if ghosting is enabled or not */ private boolean ghostingEnabled = true; @@ -81,7 +85,7 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase { this.capabilityService = capabilityService; } - + /** * @param recordableVersionService recordable version service */ @@ -90,6 +94,14 @@ public class DestroyAction extends 
RMDispositionActionExecuterAbstractBase this.recordableVersionService = recordableVersionService; } + /** + * @param inplaceRecordService inplace record service + */ + public void setInplaceRecordService(InplaceRecordService inplaceRecordService) + { + this.inplaceRecordService = inplaceRecordService; + } + /** * @param ghostingEnabled true if ghosting is enabled, false otherwise */ @@ -168,7 +180,10 @@ public class DestroyAction extends RMDispositionActionExecuterAbstractBase { recordableVersionService.destroyRecordedVersion(version); } - + + // Hide from inplace users to give the impression of destruction + inplaceRecordService.hideRecord(record); + // Add the ghosted aspect getNodeService().addAspect(record, ASPECT_GHOSTED, null); diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 5e3020e9aa..09457fc5cb 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -44,12 +44,22 @@ */ package org.alfresco.repo.web.scripts.roles; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import javax.servlet.http.HttpServletResponse; + import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -59,19 +69,26 @@ import org.alfresco.repo.domain.node.NodeDAO; import 
org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.Pair; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. 
@@ -80,7 +97,7 @@ import org.springframework.extensions.webscripts.WebScriptRequest; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends DeclarativeWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -89,14 +106,16 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String SUCCESS_STATUS = "success"; - private static final String FAILED_STATUS = "failed"; /** * The logger */ private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; + private static final String PARAM_EXPORT = "export"; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; @@ -112,50 +131,60 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; + /** Content Streamer 
*/ + protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; + + /** service setters */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) { this.patchDAO = patchDAO; } - public void setNodeDAO(NodeDAO nodeDAO) { this.nodeDAO = nodeDAO; } - public void setQnameDAO(QNameDAO qnameDAO) { this.qnameDAO = qnameDAO; } - public void setNodeService(NodeService nodeService) { this.nodeService = nodeService; } - public void setPermissionService(PermissionService permissionService) { this.permissionService = permissionService; } - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { this.extendedSecurityService = extendedSecurityService; } - public void setTransactionService(TransactionService transactionService) { this.transactionService = transactionService; } + public void setNodeDAO(NodeDAO nodeDAO) + { + this.nodeDAO = nodeDAO; + } - @Override - protected Map executeImpl(WebScriptRequest req, Status status, Cache cache) + public void setQnameDAO(QNameDAO qnameDAO) { - Map model = new HashMap(); - String batchSizeStr = req.getParameter(BATCH_SIZE); - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + this.qnameDAO = qnameDAO; + } - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_MANDATORY); - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); - return model; - } - try - { - size = Long.parseLong(batchSizeStr); - if(size <= 0) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - return model; - } - } - catch(NumberFormatException ex) - { - model.put(MODEL_STATUS, FAILED_STATUS); - model.put(MODEL_MESSAGE, MESSAGE_BATCHSIZE_IS_INVALID); - 
logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - return model; - } - final Long batchSize = size; + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; + } + + public void setPermissionService(PermissionService permissionService) + { + this.permissionService = permissionService; + } + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + { + this.extendedSecurityService = extendedSecurityService; + } + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException + { + Map model = new HashMap(); + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); @@ -167,6 +196,201 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor return model; } + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try + { + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, 
recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } + } + finally + { + out.close(); + } + + int processedNodesSize = processedNodes.size(); + + String message = ""; + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + { + message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); + } + if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) + { + message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); + } + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, message); + logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } + return model; + } + + /** + * Get export parameter from the request + * + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = false; + String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. 
+ * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + if (mimetype == null) + { + throw new WebScriptException("Web Script format '" + format + "' is not registered"); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) { return; } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); + 
res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain maximum of the records to be processed from the request if it is specified or batchsize value otherwise + * + * @param req + * @return maximum of the records to be processed from the request if it is specified or batchsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); //default total number of records to be processed to batch size value Long totalNumberOfRecordsToProcess = batchSize; if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) @@ -180,7 +404,77 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor //do nothing here, the value will remain 0L in this case } } + return totalNumberOfRecordsToProcess; + } + + /** + * Obtain batchsize parameter from the request. 
+ * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return the parent node reference from the request, or null if the parameter was not provided + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + + /** + * Process all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) + { final Long 
maxRecordsToProcess = totalNumberOfRecordsToProcess; final List processedNodes = new ArrayList(); logger.info(MESSAGE_PROCESSING_BEGIN); @@ -198,7 +492,8 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor public Void execute() throws Throwable { // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, currentIndex + batchSize); + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, + currentIndex + batchSize); // process each one for (Long nodeId : nodeIds) @@ -213,38 +508,86 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor processNode(record); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); processedNodes.add(record); - } + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } return null; } - }, - false, // read only + }, false, // read only true); // requires new } logger.info(MESSAGE_PROCESSING_END); - int processedNodesSize = processedNodes.size(); - String message = ""; - if(totalNumberOfRecordsToProcess == 0 || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) - { - message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); - } - if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) - { - message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); - } - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, message); - logger.info(message); - return model; + return processedNodes; } + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + 
final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; + } + /** * Process each node * * @param nodeRef */ @SuppressWarnings({ "unchecked"}) - private void processNode(NodeRef nodeRef) + protected void processNode(NodeRef nodeRef) { // get the reader/writer data Map readers = (Map)nodeService.getProperty(nodeRef, PROP_READERS); @@ -258,10 +601,20 @@ public class DynamicAuthoritiesGet extends DeclarativeWebScript implements Recor permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) + { + Set readersKeySet = null; + if (readers != null) { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } // re-set extended security via API - extendedSecurityService.set(nodeRef, readers.keySet(), writers.keySet()); + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); } } } diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java index 20c6dc3e4d..f8e8633db3 100644 --- a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM3993Test.java @@ -24,25 +24,6 @@ * along with Alfresco. If not, see . * #L% */ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ - package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; import java.io.Serializable; diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java new file mode 100644 index 0000000000..df6a6561f5 --- /dev/null +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM4101Test.java @@ -0,0 +1,129 @@ +/* + * #%L + * Alfresco Records Management Module + * %% + * Copyright (C) 2005 - 2016 Alfresco Software Limited + * %% + * This file is part of the Alfresco software. + * - + * If the software was purchased under a paid Alfresco license, the terms of + * the paid license agreement will prevail. Otherwise, the software is + * provided under the following open source license terms: + * - + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * - + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * - + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ * #L% + */ +package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; + +import java.util.UUID; + +import org.alfresco.module.org_alfresco_module_rm.action.impl.LinkToAction; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.service.cmr.action.Action; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.rule.Rule; +import org.alfresco.service.cmr.rule.RuleService; +import org.alfresco.service.cmr.rule.RuleType; + +/** + * Tests issue #4101: Link to, Copy to and File to rules fail when not run in background + * + * @author Tuna Aksoy + * @since 2.3.0.8 + */ +public class RM4101Test extends BaseRMTestCase +{ + private RuleService ruleService; + + @Override + protected void initServices() + { + super.initServices(); + + ruleService = (RuleService) applicationContext.getBean("RuleService"); + } + + @Override + protected boolean isRecordTest() + { + return true; + } + + public void testRunRuleNotInBackground() throws Exception + { + final String categoryName = "category1" + UUID.randomUUID().toString(); + final NodeRef category1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return filePlanService.createRecordCategory(filePlan, categoryName); + } + }); + + final NodeRef folder1 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, "folder1WithRule" + UUID.randomUUID().toString()); + } + }); + + final String folder2Name = "folder2FolderToLinkTo" + UUID.randomUUID().toString(); + final NodeRef folder2 = doTestInTransaction(new Test() + { + @Override + public NodeRef run() + { + return recordFolderService.createRecordFolder(category1, folder2Name); + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + Action linkToAction = actionService.createAction(LinkToAction.NAME); + linkToAction.setParameterValue(LinkToAction.PARAM_PATH, "/" + 
categoryName + "/" + folder2Name); + + Rule rule = new Rule(); + rule.setRuleType(RuleType.INBOUND); + rule.setTitle("LinkTo"); + rule.setAction(linkToAction); + rule.setExecuteAsynchronously(false); + ruleService.saveRule(folder1, rule); + + return null; + } + }); + + doTestInTransaction(new Test() + { + @Override + public Void run() + { + utils.createRecord(folder1, "record1" + UUID.randomUUID().toString()); + return null; + } + + @Override + public void test(Void result) throws Exception + { + assertEquals(1, nodeService.getChildAssocs(folder2).size()); + } + }); + } +} diff --git a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java index b16c1472c3..ef086d2bc8 100644 --- a/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java +++ b/rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/InplaceRecordPermissionTest.java @@ -500,8 +500,7 @@ public class InplaceRecordPermissionTest extends BaseRMTestCase * And it's metadata is maintained * Then the inplace users will no longer see the record */ - // FIXME: See RM-4095 - public void ztestDestroyedRecordInplacePermissions() + public void testDestroyedRecordInplacePermissions() { test() .given() diff --git a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 189a6b54d0..3f3105f9b8 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ 
b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -46,8 +46,8 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; -import org.springframework.extensions.webscripts.DeclarativeWebScript; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; import org.springframework.extensions.webscripts.DescriptionExtension; @@ -83,7 +83,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract DeclarativeWebScript getWebScript(); + protected abstract AbstractWebScript getWebScript(); /** * @return classpath location of webscript template @@ -151,7 +151,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - DeclarativeWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -173,7 +173,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(DeclarativeWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = 
mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 70304fb172..4f903a2b75 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -49,17 +49,22 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.File; import java.io.Serializable; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -70,6 +75,7 @@ import java.util.stream.Stream; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService; @@ -81,6 +87,9 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import 
org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -90,12 +99,17 @@ import org.alfresco.util.Pair; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.DeclarativeWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptRequest; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * DynamicAuthoritiesGet Unit Test @@ -126,13 +140,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; + @Mock + private ContentStreamer contentStreamer; + @Mock + private FileFolderService mockedFileFolderService; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected DeclarativeWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -154,6 +172,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme webScript.setNodeService(mockedNodeService); 
webScript.setPermissionService(mockedPermissionService); webScript.setExtendedSecurityService(mockedExtendedSecurityService); + webScript.setFileFolderService(mockedFileFolderService); // setup retrying transaction helper Answer doInTransactionAnswer = new Answer() { @@ -179,7 +198,9 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are no nodes with the extended security aspect When the action is executed Nothing happens + * Given that there are no nodes with the extended security aspect + * When the action is executed Nothing happens + * * @throws Exception */ @SuppressWarnings({ "unchecked" }) @@ -200,7 +221,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 0 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, never()).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, never()).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -212,8 +232,12 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are records with the extended security aspect When the action is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared And extended security is set via the updated API + * Given that there are records with the extended security aspect + * When the action is executed + * Then the aspect is removed + * And the dynamic authorities permissions are cleared + * And extended security is set via the updated API + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -222,8 +246,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 
3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -234,7 +257,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -246,7 +268,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); @@ -259,8 +280,10 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme } /** - * Given that there are non-records with the extended security aspect When the web script is executed Then the aspect is - * removed And the dynamic authorities permissions are cleared + * Given that there are non-records with the extended security aspect + * When the web script is executed + * Then the aspect is removed And the dynamic authorities permissions are cleared + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -269,8 +292,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + 
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -281,7 +303,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -307,39 +328,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - JSONObject json = executeJSONWebScript(emptyMap()); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is mandatory\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { + try + { // Set up parameters. 
Map parameters = ImmutableMap.of("batchsize", "dd"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize is invalid.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - when(mockedQnameDAO.getQName(ASPECT_EXTENDED_SECURITY)).thenReturn(null); + try + { // Set up parameters. Map parameters = ImmutableMap.of("batchsize", "0"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"failed\",\"message\":\"Parameter batchsize should be a number greater than 0.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -361,8 +393,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), 
anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -373,7 +404,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. @@ -391,8 +421,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())) - .thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -403,7 +432,6 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme .thenReturn((Serializable) Collections.emptyMap()); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) .thenReturn((Serializable) Collections.emptyMap()); - }); // Set up parameters. 
@@ -415,4 +443,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + 
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + /** + * Given I have records that require migration + * And I am interested in 
knowing which records are migrated + * When I run the migration tool + * Then I will be returned a CSV file containing the name and node reference of the record migrated + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** + * Given that I have records that require migration + * And I'm not interested in knowing which records were migrated + * When I run the migration tool + * Then I will not be returned a CSV file of details.
+ * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processedWithouthCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "false"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @Test + public void invalidParentNodeRefParameter() throws Exception + { + try + { + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", + Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); + } + } + + @Test + public void inexistentParentNodeRefParameter() throws Exception + { + try + { + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", + parentNodeRef.toString()); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef does not exist."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRef() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + 
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", + "false", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo 
mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file From c13edc3a02328ac1d1f8ad6c34cba2ff420b9038 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Wed, 5 Oct 2016 11:03:11 +0300 Subject: [PATCH 19/28] removed snapshot from all the pom files --- pom.xml | 2 +- rm-automation/pom.xml | 2 +- rm-server/pom.xml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 0937066256..eb2d999d39 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.alfresco alfresco-rm-parent pom - 2.3.0.8-SNAPSHOT + 2.3.0.8 Alfresco Records Management http://www.alfresco.org/ diff --git a/rm-automation/pom.xml b/rm-automation/pom.xml index f3f133fe49..c6d65398c3 100644 --- a/rm-automation/pom.xml +++ b/rm-automation/pom.xml @@ -3,7 +3,7 @@ org.alfresco alfresco-rm-parent - 2.3.0.8-SNAPSHOT + 2.3.0.8 4.0.0 alfresco-rm-automation diff --git a/rm-server/pom.xml b/rm-server/pom.xml index 94b6f0faba..b3b81eec86 100644 --- a/rm-server/pom.xml +++ b/rm-server/pom.xml @@ -5,7 +5,7 @@ org.alfresco alfresco-rm-parent - 2.3.0.8-SNAPSHOT + 2.3.0.8 4.0.0 alfresco-rm-server From 
65a3cc9951abeccbcf06706343545667e0609c36 Mon Sep 17 00:00:00 2001 From: Silviu Dinuta Date: Wed, 5 Oct 2016 11:50:22 +0300 Subject: [PATCH 20/28] changed the version to 2.3.0.9-SNAPSHOT in the pom files and incremented the version in module.properties files --- pom.xml | 2 +- rm-automation/pom.xml | 2 +- .../alfresco/module/org_alfresco_module_rm/module.properties | 2 +- rm-server/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index eb2d999d39..72c7b11b1c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.alfresco alfresco-rm-parent pom - 2.3.0.8 + 2.3.0.9-SNAPSHOT Alfresco Records Management http://www.alfresco.org/ diff --git a/rm-automation/pom.xml b/rm-automation/pom.xml index c6d65398c3..60843ec89e 100644 --- a/rm-automation/pom.xml +++ b/rm-automation/pom.xml @@ -3,7 +3,7 @@ org.alfresco alfresco-rm-parent - 2.3.0.8 + 2.3.0.9-SNAPSHOT 4.0.0 alfresco-rm-automation diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/module.properties b/rm-server/config/alfresco/module/org_alfresco_module_rm/module.properties index 4454716d42..c6d8c8ea4b 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/module.properties +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/module.properties @@ -6,6 +6,6 @@ module.aliases=org_alfresco_module_dod5015 module.title=Records Management module.description=Alfresco Record Management Extension -module.version=2.3.0.8 +module.version=2.3.0.9 module.repo.version.min=5.0 \ No newline at end of file diff --git a/rm-server/pom.xml b/rm-server/pom.xml index b3b81eec86..65be2c424c 100644 --- a/rm-server/pom.xml +++ b/rm-server/pom.xml @@ -5,7 +5,7 @@ org.alfresco alfresco-rm-parent - 2.3.0.8 + 2.3.0.9-SNAPSHOT 4.0.0 alfresco-rm-server From 2e6d11128f51859b552b0e43da13b63de8aae9ba Mon Sep 17 00:00:00 2001 From: Tom Page Date: Wed, 5 Oct 2016 08:24:36 +0100 Subject: [PATCH 21/28] RM-3386 Create unit tests for persistPeriodChanges and 
calculateAsOfDate. --- ...spositionActionDefinitionUpdateAction.java | 2 +- ...nActionDefinitionUpdateActionUnitTest.java | 94 +++++++++++++++++++ .../DispositionServiceImplUnitTest.java | 93 ++++++++++++++++++ 3 files changed, 188 insertions(+), 1 deletion(-) create mode 100644 rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateActionUnitTest.java create mode 100644 rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImplUnitTest.java diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java index d4de92dd51..cf262d2ca3 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java @@ -237,7 +237,7 @@ public class BroadcastDispositionActionDefinitionUpdateAction extends RMActionEx * @param dispositionActionDef The disposition action definition node * @param nextAction The next disposition action */ - private void persistPeriodChanges(NodeRef dispositionActionDef, DispositionAction nextAction) + protected void persistPeriodChanges(NodeRef dispositionActionDef, DispositionAction nextAction) { NodeRef dispositionedNode = getNodeService().getPrimaryParent(nextAction.getNodeRef()).getParentRef(); DispositionActionDefinition definition = nextAction.getDispositionActionDefinition(); diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateActionUnitTest.java 
b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateActionUnitTest.java new file mode 100644 index 0000000000..2a4f2df23b --- /dev/null +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateActionUnitTest.java @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2005-2016 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ */ +package org.alfresco.module.org_alfresco_module_rm.action.impl; + +import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.PROP_DISPOSITION_AS_OF; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Date; + +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService; +import org.alfresco.service.cmr.repository.ChildAssociationRef; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link BroadcastDispositionActionDefinitionUpdateAction}. + * + * @author Tom Page + * @since 2.3.1 + */ +public class BroadcastDispositionActionDefinitionUpdateActionUnitTest +{ + /** The node under the category containing information about the definition of the action. */ + private static final NodeRef DISPOSITION_ACTION_DEF_NODE = new NodeRef("disposition://Action/Def"); + /** The node containing the details of the next disposition step for the content. */ + private static final NodeRef NEXT_ACTION_NODE_REF = new NodeRef("next://Step/"); + /** The node being subject to the disposition step. */ + private static final NodeRef CONTENT_NODE_REF = new NodeRef("content://Node/Ref"); + + /** The class under test. */ + private BroadcastDispositionActionDefinitionUpdateAction action = new BroadcastDispositionActionDefinitionUpdateAction(); + + private NodeService mockNodeService = mock(NodeService.class); + private DispositionService mockDispositionService = mock(DispositionService.class); + + /** Inject the mock services into the class under test and link the content and next action nodes. 
*/ + @Before + public void setUp() + { + action.setNodeService(mockNodeService); + action.setDispositionService(mockDispositionService); + + ChildAssociationRef mockAssocRef = mock(ChildAssociationRef.class); + when(mockNodeService.getPrimaryParent(NEXT_ACTION_NODE_REF)).thenReturn(mockAssocRef); + when(mockAssocRef.getParentRef()).thenReturn(CONTENT_NODE_REF); + } + + /** + * Check that the disposition service is used to determine the "disposition as of" date when changes are made to the + * disposition period. + */ + @Test + public void testPersistPeriodChanges() + { + // Set up the data associated with the next disposition action. + DispositionAction mockAction = mock(DispositionAction.class); + when(mockAction.getNodeRef()).thenReturn(NEXT_ACTION_NODE_REF); + DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class); + when(mockAction.getDispositionActionDefinition()).thenReturn(mockDispositionActionDefinition); + when(mockAction.getName()).thenReturn("mockAction"); + // Set up the disposition service to return a known "disposition as of" date. + Date asOfDate = new Date(); + when(mockDispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, false)) + .thenReturn(asOfDate); + + // Call the method under test. + action.persistPeriodChanges(DISPOSITION_ACTION_DEF_NODE, mockAction); + + // Check that the "disposition as of" date has been set on the next action. 
+ verify(mockNodeService).setProperty(NEXT_ACTION_NODE_REF, PROP_DISPOSITION_AS_OF, asOfDate); + } +} diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImplUnitTest.java b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImplUnitTest.java new file mode 100644 index 0000000000..8fb7ccbbb7 --- /dev/null +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImplUnitTest.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2005-2016 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.module.org_alfresco_module_rm.disposition; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Date; + +import org.alfresco.model.ContentModel; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.repository.Period; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link DispositionServiceImpl}. 
+ * + * @author Tom Page + * @since 2.3.1 + */ +public class DispositionServiceImplUnitTest +{ + /** The node being subject to the disposition step. */ + NodeRef CONTENT_NODE_REF = new NodeRef("content://node/"); + + /** The class under test. */ + private DispositionServiceImpl dispositionService = new DispositionServiceImpl(); + + private NodeService mockNodeService = mock(NodeService.class); + + @Before + public void setUp() + { + dispositionService.setNodeService(mockNodeService); + } + + /** + * Check that the relevant information is retrieved from the DispositionActionDefinition in order to determine the + * "disposition as of" date. + */ + @Test + public void testCalculateAsOfDate() + { + // Set up a mock for the disposition action definition. + DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class); + Period mockPeriod = mock(Period.class); + when(mockDispositionActionDefinition.getPeriod()).thenReturn(mockPeriod); + when(mockDispositionActionDefinition.getPeriodProperty()).thenReturn(ContentModel.PROP_CREATED); + // Set up a created date and another date that is some Period later. + Date createdDate = new Date(1234567890); + when(mockNodeService.getProperty(CONTENT_NODE_REF, ContentModel.PROP_CREATED)).thenReturn(createdDate); + Date nextDate = new Date(1240000000); + when(mockPeriod.getNextDate(createdDate)).thenReturn(nextDate); + + // Call the method under test. + Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true); + + assertEquals("Unexpected calculation for 'as of' date", nextDate, asOfDate); + } + + /** Check that the calculated "disposition as of" date is null if a null period is given. 
*/ + @Test + public void testCalculateAsOfDate_nullPeriod() + { + DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class); + when(mockDispositionActionDefinition.getPeriod()).thenReturn(null); + + // Call the method under test. + Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true); + + assertNull("It should not be possible to determine the 'as of' date.", asOfDate); + } +} From ee13f7bbf7237a9651a6efcb5605a2744ce7811b Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Wed, 5 Oct 2016 22:16:52 +0100 Subject: [PATCH 22/28] Push unstaged changes --- .../test/util/BaseWebScriptUnitTest.java | 8 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 694 +++++++++--------- 2 files changed, 351 insertions(+), 351 deletions(-) diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 646fcc2572..3b503e11c9 100644 --- a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -31,7 +31,7 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; -import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; @@ -68,7 +68,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract AbstractWebScript 
getWebScript(); + protected abstract AbstractWebScript getWebScript(); /** * @return classpath location of webscript template @@ -136,7 +136,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - AbstractWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -158,7 +158,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 74baf0a4e1..e45053a596 100644 --- a/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -23,14 +23,14 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static 
org.mockito.Matchers.anyLong; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -38,7 +38,7 @@ import static org.mockito.Mockito.when; import java.io.File; import java.io.Serializable; -import java.util.ArrayList; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -61,9 +61,9 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.model.FileFolderService; -import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -79,11 +79,11 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.AbstractWebScript; -import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; -import org.springframework.extensions.webscripts.WebScriptResponse; +import 
org.springframework.extensions.webscripts.WebScriptResponse; /** * DynamicAuthoritiesGet Unit Test @@ -114,17 +114,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; - @Mock - private ContentStreamer contentStreamer; - @Mock - private FileFolderService mockedFileFolderService; + @Mock + private ContentStreamer contentStreamer; + @Mock + private FileFolderService mockedFileFolderService; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected AbstractWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -146,7 +146,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme webScript.setNodeService(mockedNodeService); webScript.setPermissionService(mockedPermissionService); webScript.setExtendedSecurityService(mockedExtendedSecurityService); - webScript.setFileFolderService(mockedFileFolderService); + webScript.setFileFolderService(mockedFileFolderService); // setup retrying transaction helper Answer doInTransactionAnswer = new Answer() { @@ -174,7 +174,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme /** * Given that there are no nodes with the extended security aspect * When the action is executed Nothing happens - * + * * @throws Exception */ @SuppressWarnings({ "unchecked" }) @@ -211,7 +211,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme * Then the aspect is removed * And the dynamic authorities permissions are cleared * And extended security is set via the updated API - * + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -220,7 +220,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - 
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -256,8 +256,8 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme /** * Given that there are non-records with the extended security aspect * When the web script is executed - * Then the aspect is removed And the dynamic authorities permissions are cleared - * + * Then the aspect is removed And the dynamic authorities permissions are cleared + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -266,7 +266,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -302,50 +302,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - try - { - executeJSONWebScript(emptyMap()); - fail("Expected exception as parameter batchsize is mandatory."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { - try - { + try + { // Set up 
parameters. Map parameters = ImmutableMap.of("batchsize", "dd"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - try - { + try + { // Set up parameters. Map parameters = ImmutableMap.of("batchsize", "0"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is not a number greater than 0."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -367,7 +367,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -395,7 +395,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = 
Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -417,302 +417,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); - ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), - writersKeysCaptor.capture()); - List allReaderKeySets = readerKeysCaptor.getAllValues(); - List allWritersKeySets = writersKeysCaptor.getAllValues(); - for (Set keySet : allReaderKeySets) - { - assertNull(keySet); - } - for (Set keySet : allWritersKeySets) - { - assertNull(keySet); - } - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - 
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); - ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), - writersKeysCaptor.capture()); - List allReaderKeySets = readerKeysCaptor.getAllValues(); - List allWritersKeySets = writersKeysCaptor.getAllValues(); - for (Set keySet : allReaderKeySets) - { - assertNotNull(keySet); - } - for (Set keySet : allWritersKeySets) - { - assertNull(keySet); - } - } - - /** + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void 
recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + 
List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + /** * Given I have records that require migration * And I am interested in knowning which records are migrated * When I run the migration tool - * Then I will be returned a CSV file containing the name and node reference of the record migrated - * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void processWithCSVFile() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - 
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - }); - - ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", - "true"); - executeWebScript(parameters); - - verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - - File fileForDownload = csvFileCaptor.getValue(); - assertNotNull(fileForDownload); - } - - /** + * Then I will be returned a CSV file containing the name and node reference of the record migrated + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + 
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** * Given that I have record that require migration - * And I'm not interested in knowing which records were migrated + * And I'm not interested in knowing which records were migrated * When I run the migration tool - * Then I will not be returned a CSV file of details. 
- * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void processedWithouthCSVFile() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - }); - - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", - "false"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } - - @Test - public void invalidParentNodeRefParameter() throws Exception - { - try - { - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter parentNodeRef is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", - Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); - } - } - - @Test - public void inexistentParentNodeRefParameter() throws Exception - { - try - { - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", - parentNodeRef.toString()); - executeJSONWebScript(parameters); - fail("Expected exception as parameter parentNodeRef does not exist."); - } - catch (WebScriptException e) - { - assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @SuppressWarnings("unchecked") - @Test - public void processedWithParentNodeRef() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - List children = new ArrayList(); - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - FileInfo mockedFileInfo = mock(FileInfo.class); - 
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); - children.add(mockedFileInfo); - }); - when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) - .thenReturn(children); - - Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", - "false", "parentNodeRef", parentNodeRef.toString()); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } - - @SuppressWarnings("unchecked") - @Test - public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - List children = new ArrayList(); - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - if (i <= 6l) - { - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); - } - else - { - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); - } - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - FileInfo 
mockedFileInfo = mock(FileInfo.class); - when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); - children.add(mockedFileInfo); - }); - when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) - .thenReturn(children); - - Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } + * Then I will not be returned a CSV file of details. + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processedWithouthCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "false"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String 
actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @Test + public void invalidParentNodeRefParameter() throws Exception + { + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", + Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); + } + } + + @Test + public void inexistentParentNodeRefParameter() throws Exception + { + try + { + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", + parentNodeRef.toString()); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef does not exist."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRef() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", + "false", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + 
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + 
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file From 98ca0abe19af3c8d59c37f0e06a0a277f5620aca Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Thu, 6 Oct 2016 13:03:40 +0300 Subject: [PATCH 23/28] RM-2368 Initial version record does not inherit document type and aspects from original document --- .../record/RecordServiceImpl.java | 2 +- .../version/RecordableVersionService.java | 26 ++- .../version/RecordableVersionServiceImpl.java | 55 +++--- .../version/DeclareAsRecordVersionTest.java | 174 +++++++++++------- 4 files changed, 156 insertions(+), 101 deletions(-) diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java index 6e54dbcf03..f4fa00e1d0 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java @@ -1039,7 +1039,7 @@ public class RecordServiceImpl extends BaseBehaviourBean NodeRef versionRecord = null; - recordableVersionService.createFreezeVersion(nodeRef); + recordableVersionService.createSnapshotVersion(nodeRef); // wire record up to previous record VersionHistory versionHistory = versionService.getVersionHistory(nodeRef); if (versionHistory != null) diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionService.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionService.java index 63aad9852f..d33e5ac532 100755 --- 
a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionService.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionService.java @@ -64,7 +64,7 @@ public interface RecordableVersionService Version getRecordedVersion(NodeRef record); /** - * Creates a record from the latest version, marking it as recorded. + * Creates a record from the latest frozen version, marking it as recorded. *

* Does not create a record if the node is not versionable or the latest version is already recorded. * @@ -72,6 +72,19 @@ public interface RecordableVersionService * @return NodeRef node reference to the created record. */ NodeRef createRecordFromLatestVersion(NodeRef filePlan, NodeRef nodeRef); + + /** + * Creates a record from the latest version, marking it as recorded. + *

+ * Does not create a record if the node is not versionable or the latest version is already recorded. + * + * @param nodeRef parent node reference + * @param nodeRef node reference + * @param autoVersion true, create new record version from latest version, false creates a record from the latest frozen version + * @return NodeRef node reference to the created record. + * + */ + NodeRef createRecordFromLatestVersion(NodeRef filePlan, NodeRef nodeRef, boolean autoVersion); /** * Indicates whether a record version is destroyed or not. @@ -91,17 +104,16 @@ public interface RecordableVersionService void destroyRecordedVersion(Version version); /** - * Flag that indicate to create new version on record creation if current state of node is modified + * Flag that indicate to create new version on record creation if current node is modified * - * @return boolean + * @return boolean true to auto-version on record creation, false to use latest versioned version */ - public boolean isEnableAutoVersionOnRecordCreation(); + boolean isEnableAutoVersionOnRecordCreation(); /** - * Create a snapshot - 'freeze' version of current node + * Create a snapshot version of current node * * @param nodeRef node reference - * @return version version or null */ - Version createFreezeVersion(NodeRef nodeRef); + void createSnapshotVersion(NodeRef nodeRef); } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java index b2508898fb..8463159849 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java @@ -673,6 +673,17 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl { return new NodeRef(convertStoreRef(nodeRef.getStoreRef()), 
nodeRef.getId()); } + + /** + * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#createRecordFromLatestVersion(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, autoVersion) + */ + @Override + public NodeRef createRecordFromLatestVersion(final NodeRef filePlan, final NodeRef nodeRef, final boolean isEnableAutoVersionOnRecordCreation) + { + setEnableAutoVersionOnRecordCreation(isEnableAutoVersionOnRecordCreation); + + return createRecordFromLatestVersion(filePlan, nodeRef); + } /** * @see org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService#createRecordFromLatestVersion(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef) @@ -688,7 +699,7 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl // check for versionable aspect if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE)) { - createFreezeVersion(nodeRef); + createSnapshotVersion(nodeRef); // get the latest version final Version currentVersion = getCurrentVersion(nodeRef); @@ -902,37 +913,27 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl */ public boolean isCurrentVersionDirty(NodeRef nodeRef) { + if (!nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE)) { return false; } - if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true) - { - // get the latest version - Version currentVersion = getCurrentVersion(nodeRef); - Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED); - if (currentVersion != null) - { - // grab the frozen state - NodeRef currentFrozenState = currentVersion.getFrozenStateNodeRef(); - Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED); - if (frozenModificationDate != null) - { - if (modificationDate.getTime() > frozenModificationDate.getTime()) { return true; } - } - } - else - { - 
return true; - } + // get the latest version + Version currentVersion = getCurrentVersion(nodeRef); + Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED); - } - return false; + if (currentVersion == null) { return true; } + + // grab the frozen state + NodeRef currentFrozenState = currentVersion.getFrozenStateNodeRef(); + Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED); + + boolean versionStoreOutdated = ((frozenModificationDate != null) && (modificationDate.getTime() > frozenModificationDate.getTime())); + return versionStoreOutdated; } /** - * @see RecordableVersionService#createFreezeVersion(NodeRef) + * @see RecordableVersionService#createSnapshotVersion(NodeRef) */ - public Version createFreezeVersion(NodeRef nodeRef) + public void createSnapshotVersion(NodeRef nodeRef) { - Version newVersion = null; boolean autoVersion = isEnableAutoVersionOnRecordCreation(); // if the flag autoversion on record creation set, create new version on dirty nodes if (autoVersion && isCurrentVersionDirty(nodeRef)) @@ -940,8 +941,8 @@ public class RecordableVersionServiceImpl extends Version2ServiceImpl Map autoVersionProperties = new HashMap(2); autoVersionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR); autoVersionProperties.put(VersionModel.PROP_DESCRIPTION, I18NUtil.getMessage(AUTO_VERSION_ON_RECORD_CREATION)); - newVersion = createVersion(nodeRef, autoVersionProperties); + createVersion(nodeRef, autoVersionProperties); } - return newVersion; + } } diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index 3e2d83f0f6..6010515860 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ 
b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -102,9 +102,10 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given versionable content with a recorded latest version - * When I declare a version record - * Then nothing happens since the latest version is already recorded And a warning is logged + * Given versionable content with a recorded latest version + * When I declare a version record + * Then nothing happens since the latest version is already recorded + * And a warning is logged */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() { @@ -215,6 +216,98 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest * * @see https://issues.alfresco.com/jira/browse/RM-2368 */ + + public void testCreateRecordFromLatestVersionAutoTrue() + { + doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) + { + private NodeRef myDocument; + private NodeRef versionedRecord; + private Map versionProperties; + private Date createdDate; + private Date modificationDate; + private String record_name = "initial_name"; + private String AUTO_VERSION_DESCRIPTION = "Auto Version on Record Creation"; + private boolean autoVersion = true; + + public void given() throws Exception + { + // create a document + myDocument = fileFolderService.create(dmFolder, GUID.generate(), ContentModel.TYPE_CONTENT).getNodeRef(); + createdDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_CREATED); + modificationDate = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_MODIFIED); + assertTrue("Modified date must be after or on creation date", createdDate.getTime() == modificationDate.getTime()); + + // Set initial set of properties + Map properties = new HashMap(3); + // Ensure default behaviour autoversion on change properties is set to false + properties.put(ContentModel.PROP_AUTO_VERSION_PROPS, false); + // Set initial 
name + properties.put(ContentModel.PROP_NAME, "initial_name"); + nodeService.setProperties(myDocument, properties); + nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); + nodeService.addAspect(myDocument, ContentModel.ASPECT_OWNABLE, null); + // make sure document is versionable + nodeService.addAspect(myDocument, ContentModel.ASPECT_VERSIONABLE, null); + // Change Type to a custom document + nodeService.setType(myDocument, TYPE_CUSTOM_TYPE); + + // setup version properties + versionProperties = new HashMap(2); + versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION); + versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR); + + // create initial version + versionService.createVersion(myDocument, versionProperties); + } + + public void when() + { + // Apply a custom aspect + nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null); + // Update properties + nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "updated_name"); + nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); + // test RM-2368 + versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument, autoVersion); + } + + public void then() + { + // Properties updated / flag as modified + // check the created record + assertNotNull(versionedRecord); + assertTrue(recordService.isRecord(versionedRecord)); + + // check the record type is correct + assertEquals(TYPE_CUSTOM_TYPE, nodeService.getType(versionedRecord)); + + // assert the current version is recorded + assertTrue(recordableVersionService.isCurrentVersionRecorded(myDocument)); + + // get name of record + record_name = (String) nodeService.getProperty(versionedRecord, ContentModel.PROP_NAME); + + // new version is create, current node was modified + assertTrue("Name was updated:", record_name.contains("updated_name")); + // check record + checkRecordedVersion(myDocument, AUTO_VERSION_DESCRIPTION, "1.1"); + + } + + }); + 
+ } + + + /** + * + * Given versionable content with a recorded latest version + * When I declare this version record + * Then a new minor version is created for document + * + * @see https://issues.alfresco.com/jira/browse/RM-2368 + */ public void testCreateRecordFromLatestVersion() { doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) @@ -223,10 +316,8 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest private NodeRef versionedRecord; private Map versionProperties; private Date createdDate; - private Date frozenModifDate; private Date modificationDate; private String record_name = "initial_name"; - private String AUTO_VERSION_DESCRIPTION = "Auto Version on Record Creation"; private boolean autoVersion = false; public void given() throws Exception @@ -251,8 +342,6 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest // Change Type to a custom document nodeService.setType(myDocument, TYPE_CUSTOM_TYPE); - Date modificationDate1 = (Date) nodeService.getProperty(myDocument, ContentModel.PROP_MODIFIED); - assertTrue("Frozen modification date", modificationDate.getTime() == modificationDate1.getTime()); // setup version properties versionProperties = new HashMap(2); versionProperties.put(Version.PROP_DESCRIPTION, DESCRIPTION); @@ -260,35 +349,17 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest // create initial version versionService.createVersion(myDocument, versionProperties); - Version version = versionService.getCurrentVersion(myDocument); - frozenModifDate = version.getFrozenModifiedDate(); - - // get autoversion flag from cofiguratie - autoVersion = recordableVersionService.isEnableAutoVersionOnRecordCreation(); } public void when() { - // current node is not dirty - assertFalse(isCurrentVersionDirty(myDocument)); - - if (autoVersion) - { - // Apply a custom aspect - nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null); - // Update properties - 
nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "updated_name"); - nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); - // node should be modified - assertTrue(isCurrentVersionDirty(myDocument)); - } - else - { - assertFalse(isCurrentVersionDirty(myDocument)); - } + // Apply a custom aspect + nodeService.addAspect(myDocument, ContentModel.ASPECT_TITLED, null); + // Update properties + nodeService.setProperty(myDocument, ContentModel.PROP_NAME, "initial_name"); + nodeService.setProperty(myDocument, ContentModel.PROP_DESCRIPTION, DESCRIPTION); // test RM-2368 - versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument); - + versionedRecord = recordableVersionService.createRecordFromLatestVersion(filePlan, myDocument, autoVersion); } public void then() @@ -307,45 +378,16 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest // get name of record record_name = (String) nodeService.getProperty(versionedRecord, ContentModel.PROP_NAME); - if (autoVersion) - { - // new version is create, current node was modified - assertTrue("Name was updated:", record_name.contains("updated_name")); - // check record - checkRecordedVersion(myDocument, AUTO_VERSION_DESCRIPTION, "1.1"); - } - else - { - // record is created based on existing frozen, which does not contain any modification of node - assertTrue("Name is not modified: ", record_name.contains("initial_name")); - checkRecordedVersion(myDocument, DESCRIPTION, "1.0"); - } - } - - public boolean isCurrentVersionDirty(NodeRef nodeRef) - { - if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true) - { - // get the latest version - Version currentVersion = versionService.getCurrentVersion(nodeRef); - Date modificationDate = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED); - if (currentVersion != null) - { - // grab the frozen state - NodeRef currentFrozenState = 
currentVersion.getFrozenStateNodeRef(); - Date frozenModificationDate = (Date) nodeService.getProperty(currentFrozenState, ContentModel.PROP_MODIFIED); - if (modificationDate.getTime() > frozenModificationDate.getTime()) { return true; } - } - else - { - return true; - } - } - return false; + // record is created based on existing frozen, which does not contain any modification of node + assertTrue("Name is not modified: ", record_name.contains("initial_name")); + checkRecordedVersion(myDocument, DESCRIPTION, "1.0"); + } + }); } + } From 3a74bc28f52e9278fa1e22de8ca49dd8139850bc Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Thu, 6 Oct 2016 13:17:48 +0300 Subject: [PATCH 24/28] fix ' And a warning is logged' --- .../test/integration/version/DeclareAsRecordVersionTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index 3b51e4af56..6c1e212dba 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -104,7 +104,8 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest /** * Given versionable content with a recorded latest version When I declare a version record Then nothing happens * When I declare a version record - * since the latest version is already recorded And a warning is logged + * since the latest version is already recorded + * And a warning is logged */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() { From 6155c71e249fb4a99580900e70f40465d36b8bf0 Mon Sep 17 00:00:00 2001 From: Mihai Cozma Date: Thu, 6 Oct 2016 13:42:10 +0300 Subject: [PATCH 25/28] Add 
comments to the method --- .../version/DeclareAsRecordVersionTest.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java index 6c1e212dba..9e0fe9ddae 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/version/DeclareAsRecordVersionTest.java @@ -102,9 +102,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** - * Given versionable content with a recorded latest version When I declare a version record Then nothing happens + * Given versionable content with a recorded latest version * When I declare a version record - * since the latest version is already recorded + * Then nothing happens since the latest version is already recorded * And a warning is logged */ public void testDeclareLatestVersionAsRecordButAlreadyRecorded() @@ -209,11 +209,12 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest } /** + * Given versionable content with a recorded latest version and autoversion is true + * When I declare this version record and contains local modifications + * Then a new minor version is created for document * - * * @see https://issues.alfresco.com/jira/browse/RM-2368 */ - public void testCreateRecordFromLatestVersionAutoTrue() { doBehaviourDrivenTest(new BehaviourDrivenTest(dmCollaborator) @@ -299,9 +300,9 @@ public class DeclareAsRecordVersionTest extends RecordableVersionsBaseTest /** * - * Given versionable content with a recorded latest version - * When I declare this version record - * Then a new minor version is created for document + * Given versionable content with a 
recorded latest version and autoversion is false + * When I declare this version record and contains local modifications + * Then a record is created from latest version * * @see https://issues.alfresco.com/jira/browse/RM-2368 */ From cba239d48853c2ba904f822d8730d4e3e93355a7 Mon Sep 17 00:00:00 2001 From: Tuna Aksoy Date: Fri, 7 Oct 2016 14:42:43 +0100 Subject: [PATCH 26/28] Merged V2.4 to V2.5 --- .../roles/rm-dynamicauthorities.get.desc.xml | 4 +- .../scripts/roles/DynamicAuthoritiesGet.java | 810 +++++++++--------- .../test/util/BaseWebScriptUnitTest.java | 8 +- .../roles/DynamicAuthoritiesGetUnitTest.java | 694 +++++++-------- 4 files changed, 758 insertions(+), 758 deletions(-) diff --git a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 6cf7174dff..c84c7fb643 100644 --- a/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-community/rm-community-repo/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -5,10 +5,10 @@ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
URL parameter export is optional, and if the it's value is true, will export the processed records into a csv file.
- URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
]]> - /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 09457fc5cb..1860742470 100644 --- a/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-community/rm-community-repo/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -44,22 +44,22 @@ */ package org.alfresco.repo.web.scripts.roles; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.io.Writer; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.Set; + +import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpServletResponse; - import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -69,26 +69,26 @@ import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import 
org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.model.FileFolderService; -import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.Pair; -import org.alfresco.util.TempFileProvider; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.Format; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; -import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; -import org.springframework.extensions.webscripts.WebScriptResponse; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. 
@@ -97,7 +97,7 @@ import org.springframework.extensions.webscripts.WebScriptResponse; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -106,7 +106,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; - private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String SUCCESS_STATUS = "success"; /** * The logger @@ -114,8 +114,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; - private static final String PARAM_EXPORT = "export"; - private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; + private static final String PARAM_EXPORT = "export"; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} 
records."; @@ -131,60 +131,60 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; - /** Content Streamer */ - protected ContentStreamer contentStreamer; - private FileFolderService fileFolderService; - - /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) - { - this.patchDAO = patchDAO; - } + /** Content Streamer */ + protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; - public void setNodeDAO(NodeDAO nodeDAO) - { - this.nodeDAO = nodeDAO; - } + /** service setters */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - public void setQnameDAO(QNameDAO qnameDAO) + public void setNodeDAO(NodeDAO nodeDAO) { - this.qnameDAO = qnameDAO; - } + this.nodeDAO = nodeDAO; + } - public void setNodeService(NodeService nodeService) + public void setQnameDAO(QNameDAO qnameDAO) { - this.nodeService = nodeService; + this.qnameDAO = qnameDAO; } - - public void setPermissionService(PermissionService permissionService) + + public void setNodeService(NodeService nodeService) { - this.permissionService = permissionService; - } - - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + this.nodeService = nodeService; + } + + public void setPermissionService(PermissionService permissionService) { - this.extendedSecurityService = extendedSecurityService; + this.permissionService = permissionService; } - - public void setTransactionService(TransactionService transactionService) - { - this.transactionService = transactionService; - } - - public void setContentStreamer(ContentStreamer contentStreamer) + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { - this.contentStreamer = contentStreamer; + this.extendedSecurityService = extendedSecurityService; } - - 
public void setFileFolderService(FileFolderService fileFolderService) - { - this.fileFolderService = fileFolderService; - } - - protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException - { - Map model = new HashMap(); - final Long batchSize = getBatchSizeParameter(req); + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException + { + Map model = new HashMap(); + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); @@ -196,201 +196,201 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM return model; } - Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); - - boolean attach = getExportParameter(req); - - File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); - FileWriter writer = new FileWriter(file); - BufferedWriter out = new BufferedWriter(writer); - List processedNodes = new ArrayList(); - try - { - NodeRef parentNodeRef = getParentNodeRefParameter(req); - if (parentNodeRef != null) - { - processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, - totalNumberOfRecordsToProcess.intValue(), out, attach); - } - else - { - processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, - out, attach); - } - } - finally - { - out.close(); - } - - int processedNodesSize = processedNodes.size(); - - String message = 
""; - if (totalNumberOfRecordsToProcess == 0 - || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) - { - message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); - } - if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) - { - message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); - } - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, message); - logger.info(message); - - if (attach) - { - try - { - String fileName = file.getName(); - contentStreamer.streamContent(req, res, file, null, attach, fileName, model); - model = null; - } - finally - { - if (file != null) - { - file.delete(); - } - } - } - return model; - } - - /** - * Get export parameter from the request - * - * @param req - * @return - */ - protected boolean getExportParameter(WebScriptRequest req) - { - boolean attach = false; - String export = req.getParameter(PARAM_EXPORT); - if (export != null && Boolean.parseBoolean(export)) - { - attach = true; - } - return attach; - } - - /* - * (non-Javadoc) - * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. 
- * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) - */ - @Override - public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException - { - // retrieve requested format - String format = req.getFormat(); - - try - { - String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try + { + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } + } + finally + { + out.close(); + } + + int processedNodesSize = processedNodes.size(); + + String message = ""; + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + { + message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); + } + if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) + { + message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); + } + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, message); + logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } + return model; + } + + /** + * Get 
export parameter from the request + * + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = false; + String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. + * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); if (mimetype == null) { throw new WebScriptException("Web Script format '" + format + "' is not registered"); - } - - // construct model for script / template - Status status = new Status(); - Cache cache = new Cache(getDescription().getRequiredCache()); - Map model = buildModel(req, res); - if (model == null) { return; } - model.put("status", status); - model.put("cache", cache); - - Map templateModel = createTemplateParameters(req, res, model); - - // render output - int statusCode = status.getCode(); - if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) - { - if (logger.isDebugEnabled()) - { - logger.debug("Force success status header in response: " + req.forceSuccessStatus()); - logger.debug("Setting status " + statusCode); - } - res.setStatus(statusCode); - } - - // apply location - String location = status.getLocation(); - if (location != null && location.length() > 0) - { - if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); - res.setHeader(WebScriptResponse.HEADER_LOCATION, location); - } - - // apply cache - res.setCache(cache); - - String callback = null; - if (getContainer().allowCallbacks()) - { - callback = 
req.getJSONCallback(); - } - if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) - { - if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" - + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); - - // NOTE: special case for wrapping JSON results in a javascript function callback - res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); - res.getWriter().write((callback + "(")); - } - else - { - if (logger.isDebugEnabled()) - logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); - - res.setContentType(mimetype + ";charset=UTF-8"); - } - - // render response according to requested format - renderFormatTemplate(format, templateModel, res.getWriter()); - - if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) - { - // NOTE: special case for wrapping JSON results in a javascript function callback - res.getWriter().write(")"); - } - } - catch (Throwable e) - { - if (logger.isDebugEnabled()) - { - StringWriter stack = new StringWriter(); - e.printStackTrace(new PrintWriter(stack)); - logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); - } - - throw createStatusException(e, req, res); - } - } - - protected void renderFormatTemplate(String format, Map model, Writer writer) - { - format = (format == null) ? "" : format; - - String templatePath = getDescription().getId() + "." 
+ format; - - if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); - - renderTemplate(templatePath, model, writer); - } - - /** - * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise - * - * @param req - * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise - */ - protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) - { - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) { return; } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON 
results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); + res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise + * + * @param req + * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); //default total number of records to be processed to batch size value Long totalNumberOfRecordsToProcess = batchSize; if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) @@ -404,77 +404,77 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM //do nothing here, the value will remain 0L in this case } } - return totalNumberOfRecordsToProcess; - } - - /** - * Obtain batchsize parameter from the request. 
- * - * @param req - * @return batchsize parameter from the request - */ - protected Long getBatchSizeParameter(WebScriptRequest req) - { - String batchSizeStr = req.getParameter(BATCH_SIZE); - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); - } - try - { - size = Long.parseLong(batchSizeStr); - if (size <= 0) - { - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - } - } - catch (NumberFormatException ex) - { - logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); - } - return size; - } + return totalNumberOfRecordsToProcess; + } - /** - * Get parentNodeRef parameter from the request - * - * @param req - * @return - */ - protected NodeRef getParentNodeRefParameter(WebScriptRequest req) - { - String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); - NodeRef parentNodeRef = null; - if (StringUtils.isNotBlank(parentNodeRefStr)) - { - parentNodeRef = new NodeRef(parentNodeRefStr); - if(!nodeService.exists(parentNodeRef)) - { - String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); - logger.info(message); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); - } - } - return parentNodeRef; - } - - /** - * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess - * parameters - * - * @param batchSize - * @param maxNodeId - * @param recordAspectPair - * @param totalNumberOfRecordsToProcess - * @return the list of processed nodes - */ - protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, - Long totalNumberOfRecordsToProcess, final BufferedWriter out, final 
boolean attach) - { + /** + * Obtain batchsize parameter from the request. + * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + + /** + * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, 
final BufferedWriter out, final boolean attach) + { final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final List processedNodes = new ArrayList(); logger.info(MESSAGE_PROCESSING_BEGIN); @@ -492,8 +492,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM public Void execute() throws Throwable { // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, - currentIndex + batchSize); + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, + currentIndex + batchSize); // process each one for (Long nodeId : nodeIds) @@ -508,79 +508,79 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM processNode(record); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); processedNodes.add(record); - if (attach) - { - out.write(recordName); - out.write(","); - out.write(record.toString()); - out.write("\n"); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); } - } + } return null; } - }, false, // read only + }, false, // read only true); // requires new } logger.info(MESSAGE_PROCESSING_END); - return processedNodes; + return processedNodes; + } + + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + 
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; } - protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, - final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, - final boolean attach) - { - final List processedNodes = new ArrayList(); - final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); - logger.info(MESSAGE_PROCESSING_BEGIN); - // by batch size - for (int i = 0; i < children.size(); i += batchSize) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - final int currentIndex = i; - - transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() - { - public Void execute() throws Throwable - { - List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); - // process each 
one - for (FileInfo node : nodes) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - NodeRef record = node.getNodeRef(); - if (nodeService.hasAspect(record, recordAspectPair.getSecond())) - { - String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); - processNode(record); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); - processedNodes.add(record); - if (attach) - { - out.write(recordName); - out.write(","); - out.write(record.toString()); - out.write("\n"); - } - } - } - - return null; - } - }, false, // read only - true); // requires new - } - logger.info(MESSAGE_PROCESSING_END); - return processedNodes; - } - /** * Process each node * @@ -601,20 +601,20 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) - { - Set readersKeySet = null; - if (readers != null) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) { - readersKeySet = readers.keySet(); - } - Set writersKeySet = null; - if (writers != null) - { - writersKeySet = writers.keySet(); - } + Set readersKeySet = null; + if (readers != null) + { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } // re-set extended security via API - extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); } } } diff --git a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java index 3f3105f9b8..7ca4481384 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java +++ b/rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java @@ -46,7 +46,7 @@ import org.json.JSONObject; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.extensions.surf.util.Content; -import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Container; import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description.RequiredCache; @@ -83,7 +83,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest /** * @return declarative webscript */ - protected abstract AbstractWebScript getWebScript(); + protected abstract AbstractWebScript getWebScript(); /** * 
@return classpath location of webscript template @@ -151,7 +151,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest */ protected String executeWebScript(Map parameters, String content) throws Exception { - AbstractWebScript webScript = getWebScript(); + AbstractWebScript webScript = getWebScript(); String template = getWebScriptTemplate(); // initialise webscript @@ -173,7 +173,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest * @return {@link WebScriptRequest} mocked web script request */ @SuppressWarnings("rawtypes") - protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception + protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map parameters, String content) throws Exception { Match match = new Match(null, parameters, null, webScript); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); diff --git a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java index 4f903a2b75..38d3da056b 100644 --- a/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java +++ b/rm-community/rm-community-repo/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java @@ -49,14 +49,14 @@ import static java.util.Collections.emptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static 
org.mockito.Matchers.anyLong; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -64,7 +64,7 @@ import static org.mockito.Mockito.when; import java.io.File; import java.io.Serializable; -import java.util.ArrayList; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -87,9 +87,9 @@ import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.model.FileFolderService; -import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; @@ -105,11 +105,11 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.springframework.extensions.webscripts.AbstractWebScript; -import org.springframework.extensions.webscripts.Status; +import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; -import org.springframework.extensions.webscripts.WebScriptResponse; +import 
org.springframework.extensions.webscripts.WebScriptResponse; /** * DynamicAuthoritiesGet Unit Test @@ -140,17 +140,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme private TransactionService mockedTransactionService; @Mock private RetryingTransactionHelper mockedRetryingTransactionHelper; - @Mock - private ContentStreamer contentStreamer; - @Mock - private FileFolderService mockedFileFolderService; + @Mock + private ContentStreamer contentStreamer; + @Mock + private FileFolderService mockedFileFolderService; /** test component */ @InjectMocks private DynamicAuthoritiesGet webScript; @Override - protected AbstractWebScript getWebScript() + protected AbstractWebScript getWebScript() { return webScript; } @@ -172,7 +172,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme webScript.setNodeService(mockedNodeService); webScript.setPermissionService(mockedPermissionService); webScript.setExtendedSecurityService(mockedExtendedSecurityService); - webScript.setFileFolderService(mockedFileFolderService); + webScript.setFileFolderService(mockedFileFolderService); // setup retrying transaction helper Answer doInTransactionAnswer = new Answer() { @@ -200,7 +200,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme /** * Given that there are no nodes with the extended security aspect * When the action is executed Nothing happens - * + * * @throws Exception */ @SuppressWarnings({ "unchecked" }) @@ -237,7 +237,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme * Then the aspect is removed * And the dynamic authorities permissions are cleared * And extended security is set via the updated API - * + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -246,7 +246,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - 
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -282,8 +282,8 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme /** * Given that there are non-records with the extended security aspect * When the web script is executed - * Then the aspect is removed And the dynamic authorities permissions are cleared - * + * Then the aspect is removed And the dynamic authorities permissions are cleared + * * @throws Exception */ @SuppressWarnings("unchecked") @@ -292,7 +292,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -328,50 +328,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme @Test public void missingBatchSizeParameter() throws Exception { - try - { - executeJSONWebScript(emptyMap()); - fail("Expected exception as parameter batchsize is mandatory."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + try + { + executeJSONWebScript(emptyMap()); + fail("Expected exception as parameter batchsize is mandatory."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void invalidBatchSizeParameter() throws Exception { - try - { + try + { // Set up 
parameters. Map parameters = ImmutableMap.of("batchsize", "dd"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test public void batchSizeShouldBeGraterThanZero() throws Exception { - try - { + try + { // Set up parameters. Map parameters = ImmutableMap.of("batchsize", "0"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter batchsize is not a number greater than 0."); - } - catch (WebScriptException e) - { - assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } + executeJSONWebScript(parameters); + fail("Expected exception as parameter batchsize is not a number greater than 0."); + } + catch (WebScriptException e) + { + assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", + Status.STATUS_BAD_REQUEST, e.getStatus()); + } } @Test @@ -393,7 +393,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -421,7 +421,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme { List ids = 
Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) .thenReturn(Collections.emptyList()); ids.stream().forEach((i) -> { @@ -443,302 +443,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); - - }); - - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); - ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), - writersKeysCaptor.capture()); - List allReaderKeySets = readerKeysCaptor.getAllValues(); - List allWritersKeySets = writersKeysCaptor.getAllValues(); - for (Set keySet : allReaderKeySets) - { - assertNull(keySet); - } - for (Set keySet : allWritersKeySets) - { - assertNull(keySet); - } - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - 
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); - - }); - - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); - ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); - - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); - verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); - verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); - verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), - eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); - verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), - writersKeysCaptor.capture()); - List allReaderKeySets = readerKeysCaptor.getAllValues(); - List allWritersKeySets = writersKeysCaptor.getAllValues(); - for (Set keySet : allReaderKeySets) - { - assertNotNull(keySet); - } - for (Set keySet : allWritersKeySets) - { - assertNull(keySet); - } - } - - /** + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void 
recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + 
List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); + + }); + + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + ArgumentCaptor readerKeysCaptor = ArgumentCaptor.forClass(Set.class); + ArgumentCaptor writersKeysCaptor = ArgumentCaptor.forClass(Set.class); + + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); + verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); + verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); + verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), + eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); + verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), + writersKeysCaptor.capture()); + List allReaderKeySets = readerKeysCaptor.getAllValues(); + List allWritersKeySets = writersKeysCaptor.getAllValues(); + for (Set keySet : allReaderKeySets) + { + assertNotNull(keySet); + } + for (Set keySet : allWritersKeySets) + { + assertNull(keySet); + } + } + + /** * Given I have records that require migration * And I am interested in knowning which records are migrated * When I run the migration tool - * Then I will be returned a CSV file containing the name and node reference of the record migrated - * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void processWithCSVFile() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - 
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - }); - - ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", - "true"); - executeWebScript(parameters); - - verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - - File fileForDownload = csvFileCaptor.getValue(); - assertNotNull(fileForDownload); - } - - /** + * Then I will be returned a CSV file containing the name and node reference of the record migrated + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processWithCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + 
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + }); + + ArgumentCaptor csvFileCaptor = ArgumentCaptor.forClass(File.class); + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "true"); + executeWebScript(parameters); + + verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + + File fileForDownload = csvFileCaptor.getValue(); + assertNotNull(fileForDownload); + } + + /** * Given that I have record that require migration - * And I'm not interested in knowing which records were migrated + * And I'm not interested in knowing which records were migrated * When I run the migration tool - * Then I will not be returned a CSV file of details. 
- * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void processedWithouthCSVFile() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) - .thenReturn(Collections.emptyList()); - - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - }); - - Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", - "false"); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } - - @Test - public void invalidParentNodeRefParameter() throws Exception - { - try - { - // Set up parameters. 
- Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); - executeJSONWebScript(parameters); - fail("Expected exception as parameter parentNodeRef is invalid."); - } - catch (WebScriptException e) - { - assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", - Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); - } - } - - @Test - public void inexistentParentNodeRefParameter() throws Exception - { - try - { - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); - // Set up parameters. - Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", - parentNodeRef.toString()); - executeJSONWebScript(parameters); - fail("Expected exception as parameter parentNodeRef does not exist."); - } - catch (WebScriptException e) - { - assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", - Status.STATUS_BAD_REQUEST, e.getStatus()); - } - } - - @SuppressWarnings("unchecked") - @Test - public void processedWithParentNodeRef() throws Exception - { - List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - List children = new ArrayList(); - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - FileInfo mockedFileInfo = mock(FileInfo.class); - 
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); - children.add(mockedFileInfo); - }); - when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) - .thenReturn(children); - - Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", - "false", "parentNodeRef", parentNodeRef.toString()); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } - - @SuppressWarnings("unchecked") - @Test - public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception - { - List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); - NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); - List children = new ArrayList(); - ids.stream().forEach((i) -> { - NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); - when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); - if (i <= 6l) - { - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); - } - else - { - when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); - } - when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) - .thenReturn((Serializable) Collections.emptyMap()); - when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) - .thenReturn((Serializable) Collections.emptyMap()); - String name = "name" + i; - when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); - FileInfo 
mockedFileInfo = mock(FileInfo.class); - when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); - children.add(mockedFileInfo); - }); - when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) - .thenReturn(children); - - Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); - JSONObject json = executeJSONWebScript(parameters); - assertNotNull(json); - String actualJSONString = json.toString(); - ObjectMapper mapper = new ObjectMapper(); - String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; - assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); - - verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), - any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); - } + * Then I will not be returned a CSV file of details. + * + * @throws Exception + */ + @SuppressWarnings("unchecked") + @Test + public void processedWithouthCSVFile() throws Exception + { + List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); + when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) + .thenReturn(Collections.emptyList()); + + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair(i, nodeRef)); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + }); + + Map parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", + "false"); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String 
actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; + assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @Test + public void invalidParentNodeRefParameter() throws Exception + { + try + { + // Set up parameters. + Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); + executeJSONWebScript(parameters); + fail("Expected exception as parameter parentNodeRef is invalid."); + } + catch (WebScriptException e) + { + assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", + Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); + } + } + + @Test + public void inexistentParentNodeRefParameter() throws Exception + { + try + { + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); + // Set up parameters. 
+ Map parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef",
+ parentNodeRef.toString());
+ executeJSONWebScript(parameters);
+ fail("Expected exception as parameter parentNodeRef does not exist.");
+ }
+ catch (WebScriptException e)
+ {
+ assertEquals("If parameter parentNodeRef does not exist then 'Bad Request' should be returned.",
+ Status.STATUS_BAD_REQUEST, e.getStatus());
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test
+ public void processedWithParentNodeRef() throws Exception
+ {
+ List ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
+ NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
+ List children = new ArrayList();
+ ids.stream().forEach((i) -> {
+ NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
+ when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
+ when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
+ when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
+ .thenReturn((Serializable) Collections.emptyMap());
+ when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
+ .thenReturn((Serializable) Collections.emptyMap());
+ String name = "name" + i;
+ when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
+ FileInfo mockedFileInfo = mock(FileInfo.class);
+ when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
+ children.add(mockedFileInfo);
+ });
+ when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
+ .thenReturn(children);
+
+ Map parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export",
+ "false", "parentNodeRef", parentNodeRef.toString());
+ JSONObject json = executeJSONWebScript(parameters);
+ assertNotNull(json);
+ String actualJSONString = json.toString();
+ ObjectMapper mapper = new ObjectMapper();
+ String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
+ 
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } + + @SuppressWarnings("unchecked") + @Test + public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception + { + List ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); + NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); + List children = new ArrayList(); + ids.stream().forEach((i) -> { + NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); + when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); + if (i <= 6l) + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); + } + else + { + when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); + } + when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) + .thenReturn((Serializable) Collections.emptyMap()); + when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) + .thenReturn((Serializable) Collections.emptyMap()); + String name = "name" + i; + when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); + FileInfo mockedFileInfo = mock(FileInfo.class); + when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); + children.add(mockedFileInfo); + }); + when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) + .thenReturn(children); + + Map parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); + JSONObject json = executeJSONWebScript(parameters); + assertNotNull(json); + String actualJSONString = json.toString(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; + 
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); + + verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), + any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); + } } \ No newline at end of file From 4439f9bc18a94b14115873404995e9c7abc8e775 Mon Sep 17 00:00:00 2001 From: Tom Page Date: Fri, 7 Oct 2016 14:51:38 +0100 Subject: [PATCH 27/28] RM-3386 Integration test to check that updating the disposition period works. This test is the scenario given in the bug report. Create a record that has a next disposition step in a year. Update the step to be three years, and check that the record's 'disposition as of' date is updated accordingly. --- .../disposition/DispositionTestSuite.java | 3 +- .../UpdateDispositionScheduleTest.java | 164 ++++++++++++++++++ .../test/util/CommonRMTestUtils.java | 36 ++-- 3 files changed, 185 insertions(+), 18 deletions(-) create mode 100644 rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateDispositionScheduleTest.java diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java index 71fe483156..e1938d1d28 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java @@ -31,7 +31,8 @@ import org.junit.runners.Suite.SuiteClasses; @RunWith(Suite.class) @SuiteClasses( { - CutOffTest.class + CutOffTest.class, + UpdateDispositionScheduleTest.class }) public class DispositionTestSuite { diff --git 
a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateDispositionScheduleTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateDispositionScheduleTest.java new file mode 100644 index 0000000000..06ce6a5399 --- /dev/null +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateDispositionScheduleTest.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . 
+ */ +package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition; + +import static org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest.test; + +import java.io.Serializable; +import java.util.Date; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import com.google.common.collect.ImmutableMap; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction; +import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService; +import org.alfresco.module.org_alfresco_module_rm.job.publish.DispositionActionDefinitionPublishExecutor; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils; +import org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.namespace.QName; +import org.alfresco.util.ApplicationContextHelper; +import org.springframework.extensions.webscripts.GUID; + +/** + * Integration tests for updating the disposition schedule. + * + * @author Tom Page + * @since 2.3.1 + */ +public class UpdateDispositionScheduleTest extends BaseRMTestCase +{ + /** A unique prefix for the constants in this test. */ + protected static final String TEST_PREFIX = UpdateDispositionScheduleTest.class.getName() + GUID.generate() + "_"; + /** The name to use for the category. */ + protected static final String CATEGORY_NAME = TEST_PREFIX + "Category"; + /** The name to use for the folder. */ + protected static final String FOLDER_NAME = TEST_PREFIX + "Folder"; + /** The name to use for the record. 
*/ + protected static final String RECORD_NAME = TEST_PREFIX + "Record"; + + /** The executor for the disposition update job. */ + private DispositionActionDefinitionPublishExecutor dispositionActionDefinitionPublishExecutor; + /** The internal disposition service is used to avoid permissions issues when updating the record. */ + private DispositionService internalDispositionService; + + /** The category node. */ + private NodeRef category; + /** The folder node. */ + private NodeRef folder; + /** The record node. */ + private NodeRef record; + /** The 'disposition as of' date from before the 'when' step. */ + private Date originalAsOfDate; + + @Override + protected void setUp() throws Exception + { + super.setUp(); + + BehaviourTest.initBehaviourTests(retryingTransactionHelper); + + // Get the application context + applicationContext = ApplicationContextHelper.getApplicationContext(getConfigLocations()); + dispositionActionDefinitionPublishExecutor = applicationContext.getBean(DispositionActionDefinitionPublishExecutor.class); + internalDispositionService = (DispositionService) applicationContext.getBean("dispositionService"); + } + + /** + * RM-3386 + *

+     * Given a record subject to a disposition schedule
+     * And the next step is due to run at some period after the date the content was created
+     * When I update the period of the next step (and wait for this to be processed)
+     * Then the "as of" date is updated to be at the new period after the creation date.
+     * 
+ */ + public void testUpdatePeriod() + { + test() + .given(() -> { + // Create a category. + category = filePlanService.createRecordCategory(filePlan, CATEGORY_NAME); + // Create a disposition schedule for the category (Cut off immediately, then Destroy 1 year after the creation date). + DispositionSchedule dispSched = utils.createBasicDispositionSchedule(category, "instructions", "authority", true, false); + Map cutOffParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY); + dispositionService.addDispositionActionDefinition(dispSched, cutOffParams); + Map destroyParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_ONE_YEAR, + PROP_DISPOSITION_PERIOD_PROPERTY, ContentModel.PROP_CREATED); + dispositionService.addDispositionActionDefinition(dispSched, destroyParams); + // Create a folder containing a record within the category. + folder = recordFolderService.createRecordFolder(category, FOLDER_NAME); + record = fileFolderService.create(folder, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef(); + + dispositionService.cutoffDisposableItem(record); + // Ensure the update has been applied to the record. + internalDispositionService.updateNextDispositionAction(record); + + originalAsOfDate = dispositionService.getNextDispositionAction(record).getAsOfDate(); + }) + .when(() -> { + // Update the Destroy step to be 3 years after the creation date. 
+ DispositionSchedule dispSched = dispositionService.getDispositionSchedule(category); + DispositionActionDefinition destroy = dispSched.getDispositionActionDefinitionByName(DestroyAction.NAME); + Map destroyParams = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_THREE_YEARS, + PROP_DISPOSITION_PERIOD_PROPERTY, ContentModel.PROP_CREATED); + dispositionService.updateDispositionActionDefinition(destroy, destroyParams); + + // Make the disposition action definition update job run. + dispositionActionDefinitionPublishExecutor.publish(destroy.getNodeRef()); + }) + .then() + .expect(true) + .from(() -> aboutTwoYearsApart(originalAsOfDate, dispositionService.getNextDispositionAction(record).getAsOfDate())) + .because("Increasing the destroy period by two years should increase the 'as of' date by two years."); + } + + /** + * Check that the two given dates are approximately two years apart. + *

+ * This actually just checks that they're more than one and less than three years apart, because leap years make + * things hard to calculate. + * + * @return true if the two dates are about two years apart. + */ + private boolean aboutTwoYearsApart(Date start, Date end) + { + long days = daysBetween(start, end); + long yearInDays = 365; + return (yearInDays < days) && (days < 3 * yearInDays); + } + + /** Find the number of days between the two dates. */ + private long daysBetween(Date start, Date end) + { + return TimeUnit.MILLISECONDS.toDays(end.getTime() - start.getTime()); + } +} diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java index 3d66cb81bb..275dc089da 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java @@ -56,7 +56,7 @@ import org.springframework.context.ApplicationContext; /** * Common RM test utility methods. 
- * + * * @author Roy Wetherall */ public class CommonRMTestUtils implements RecordsManagementModel @@ -76,10 +76,12 @@ public class CommonRMTestUtils implements RecordsManagementModel public static final String DEFAULT_EVENT_NAME = "case_closed"; public static final String PERIOD_NONE = "none|0"; public static final String PERIOD_IMMEDIATELY = "immediately|0"; + public static final String PERIOD_ONE_YEAR = "year|1"; + public static final String PERIOD_THREE_YEARS = "year|3"; /** * Constructor - * + * * @param applicationContext application context */ public CommonRMTestUtils(ApplicationContext applicationContext) @@ -95,7 +97,7 @@ public class CommonRMTestUtils implements RecordsManagementModel /** * Create a disposition schedule - * + * * @param container record category * @return {@link DispositionSchedule} created disposition schedule node reference */ @@ -129,15 +131,15 @@ public class CommonRMTestUtils implements RecordsManagementModel boolean extendedDispositionSchedule) { return createDispositionSchedule( - container, - dispositionInstructions, - dispositionAuthority, - isRecordLevel, - defaultDispositionActions, - extendedDispositionSchedule, + container, + dispositionInstructions, + dispositionAuthority, + isRecordLevel, + defaultDispositionActions, + extendedDispositionSchedule, DEFAULT_EVENT_NAME); } - + /** * Create test disposition schedule */ @@ -241,8 +243,8 @@ public class CommonRMTestUtils implements RecordsManagementModel modelSecurityService.setEnabled(false); try { - nodeService.setProperty(record, RecordsManagementModel.PROP_DATE_FILED, new Date()); - nodeService.setProperty(record, ContentModel.PROP_TITLE, "titleValue"); + nodeService.setProperty(record, RecordsManagementModel.PROP_DATE_FILED, new Date()); + nodeService.setProperty(record, ContentModel.PROP_TITLE, "titleValue"); actionService.executeRecordsManagementAction(record, "declareRecord"); } finally @@ -255,7 +257,7 @@ public class CommonRMTestUtils implements RecordsManagementModel 
}, AuthenticationUtil.getAdminUserName()); - } + } public void closeFolder(final NodeRef recordFolder) { @@ -293,10 +295,10 @@ public class CommonRMTestUtils implements RecordsManagementModel return filePlanRoleService.createRole(filePlan, roleName, roleName, capabilities); } - + /** * Helper method to complete event on disposable item - * + * * @param disposableItem disposable item (record or record folder) * @param eventName event name */ @@ -305,8 +307,8 @@ public class CommonRMTestUtils implements RecordsManagementModel // build action properties Map params = new HashMap(1); params.put(CompleteEventAction.PARAM_EVENT_NAME, eventName); - + // complete event - actionService.executeRecordsManagementAction(disposableItem, CompleteEventAction.NAME, params); + actionService.executeRecordsManagementAction(disposableItem, CompleteEventAction.NAME, params); } } From 9a9600c7825403a2770dc55b06f416e7d465988e Mon Sep 17 00:00:00 2001 From: Tom Page Date: Fri, 7 Oct 2016 15:14:36 +0100 Subject: [PATCH 28/28] RM-3386 Fix the logger to be private static final. 
--- .../disposition/DispositionServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java index 05cb50ba52..2f44f18bbe 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java @@ -68,7 +68,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl RecordsManagementPolicies.OnFileRecord { /** Logger */ - Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class); /** Behaviour filter */ private BehaviourFilter behaviourFilter;