Merged branch for v2.5 -> master

This commit is contained in:
Tuna Aksoy
2016-10-05 22:05:30 +01:00
parent 524ec7ba84
commit ffc926b37b
4 changed files with 758 additions and 758 deletions

View File

@@ -5,10 +5,10 @@
URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.<br/> URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.<br/>
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.<br/> URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.<br/>
URL parameter export is optional, and if the it's value is true, will export the processed records into a csv file.<br/> URL parameter export is optional, and if the it's value is true, will export the processed records into a csv file.<br/>
URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.<br/> URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.<br/>
]]> ]]>
</description> </description>
<url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}&amp;export={export?}&amp;parentNodeRef={parentNodeRef?}</url> <url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}&amp;export={export?}&amp;parentNodeRef={parentNodeRef?}</url>
<format default="json">argument</format> <format default="json">argument</format>
<authentication>admin</authentication> <authentication>admin</authentication>
<transaction allow="readonly">required</transaction> <transaction allow="readonly">required</transaction>

View File

@@ -44,22 +44,22 @@
*/ */
package org.alfresco.repo.web.scripts.roles; package org.alfresco.repo.web.scripts.roles;
import java.io.BufferedWriter; import java.io.BufferedWriter;
import java.io.File; import java.io.File;
import java.io.FileWriter; import java.io.FileWriter;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.io.StringWriter; import java.io.StringWriter;
import java.io.Writer; import java.io.Writer;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponse;
import org.alfresco.model.ContentModel; import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority;
@@ -69,26 +69,26 @@ import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName; import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService; import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair; import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Cache; import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Format;
import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse; import org.springframework.extensions.webscripts.WebScriptResponse;
/** /**
* Webscript used for removing dynamic authorities from the records. * Webscript used for removing dynamic authorities from the records.
@@ -97,7 +97,7 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
* @since 2.3.0.7 * @since 2.3.0.7
*/ */
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel
{ {
private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0.";
private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN";
@@ -106,7 +106,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN";
private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid.";
private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory";
private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist.";
private static final String SUCCESS_STATUS = "success"; private static final String SUCCESS_STATUS = "success";
/** /**
* The logger * The logger
@@ -114,8 +114,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class);
private static final String BATCH_SIZE = "batchsize"; private static final String BATCH_SIZE = "batchsize";
private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords";
private static final String PARAM_EXPORT = "export"; private static final String PARAM_EXPORT = "export";
private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String PARAM_PARENT_NODE_REF = "parentNodeRef";
private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_STATUS = "responsestatus";
private static final String MODEL_MESSAGE = "message"; private static final String MODEL_MESSAGE = "message";
private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records.";
@@ -131,60 +131,60 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private PermissionService permissionService; private PermissionService permissionService;
private ExtendedSecurityService extendedSecurityService; private ExtendedSecurityService extendedSecurityService;
private TransactionService transactionService; private TransactionService transactionService;
/** Content Streamer */ /** Content Streamer */
protected ContentStreamer contentStreamer; protected ContentStreamer contentStreamer;
private FileFolderService fileFolderService; private FileFolderService fileFolderService;
/** service setters */
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
public void setNodeDAO(NodeDAO nodeDAO) /** service setters */
{ public void setPatchDAO(PatchDAO patchDAO)
this.nodeDAO = nodeDAO; {
} this.patchDAO = patchDAO;
}
public void setQnameDAO(QNameDAO qnameDAO) public void setNodeDAO(NodeDAO nodeDAO)
{ {
this.qnameDAO = qnameDAO; this.nodeDAO = nodeDAO;
} }
public void setNodeService(NodeService nodeService) public void setQnameDAO(QNameDAO qnameDAO)
{ {
this.nodeService = nodeService; this.qnameDAO = qnameDAO;
} }
public void setPermissionService(PermissionService permissionService) public void setNodeService(NodeService nodeService)
{ {
this.permissionService = permissionService; this.nodeService = nodeService;
} }
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) public void setPermissionService(PermissionService permissionService)
{ {
this.extendedSecurityService = extendedSecurityService; this.permissionService = permissionService;
} }
public void setTransactionService(TransactionService transactionService) public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService)
{
this.transactionService = transactionService;
}
public void setContentStreamer(ContentStreamer contentStreamer)
{ {
this.contentStreamer = contentStreamer; this.extendedSecurityService = extendedSecurityService;
} }
public void setFileFolderService(FileFolderService fileFolderService) public void setTransactionService(TransactionService transactionService)
{ {
this.fileFolderService = fileFolderService; this.transactionService = transactionService;
} }
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException public void setContentStreamer(ContentStreamer contentStreamer)
{ {
Map<String, Object> model = new HashMap<String, Object>(); this.contentStreamer = contentStreamer;
final Long batchSize = getBatchSizeParameter(req); }
public void setFileFolderService(FileFolderService fileFolderService)
{
this.fileFolderService = fileFolderService;
}
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException
{
Map<String, Object> model = new HashMap<String, Object>();
final Long batchSize = getBatchSizeParameter(req);
// get the max node id and the extended security aspect // get the max node id and the extended security aspect
Long maxNodeId = patchDAO.getMaxAdmNodeID(); Long maxNodeId = patchDAO.getMaxAdmNodeID();
final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY);
@@ -196,201 +196,201 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
return model; return model;
} }
Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize);
boolean attach = getExportParameter(req); boolean attach = getExportParameter(req);
File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); File file = TempFileProvider.createTempFile("processedNodes_", ".csv");
FileWriter writer = new FileWriter(file); FileWriter writer = new FileWriter(file);
BufferedWriter out = new BufferedWriter(writer); BufferedWriter out = new BufferedWriter(writer);
List<NodeRef> processedNodes = new ArrayList<NodeRef>(); List<NodeRef> processedNodes = new ArrayList<NodeRef>();
try try
{ {
NodeRef parentNodeRef = getParentNodeRefParameter(req); NodeRef parentNodeRef = getParentNodeRefParameter(req);
if (parentNodeRef != null) if (parentNodeRef != null)
{ {
processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair,
totalNumberOfRecordsToProcess.intValue(), out, attach); totalNumberOfRecordsToProcess.intValue(), out, attach);
} }
else else
{ {
processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess,
out, attach); out, attach);
} }
} }
finally finally
{ {
out.close(); out.close();
} }
int processedNodesSize = processedNodes.size(); int processedNodesSize = processedNodes.size();
String message = ""; String message = "";
if (totalNumberOfRecordsToProcess == 0 if (totalNumberOfRecordsToProcess == 0
|| (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess))
{ {
message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize);
} }
if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize)
{ {
message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess);
} }
model.put(MODEL_STATUS, SUCCESS_STATUS); model.put(MODEL_STATUS, SUCCESS_STATUS);
model.put(MODEL_MESSAGE, message); model.put(MODEL_MESSAGE, message);
logger.info(message); logger.info(message);
if (attach) if (attach)
{ {
try try
{ {
String fileName = file.getName(); String fileName = file.getName();
contentStreamer.streamContent(req, res, file, null, attach, fileName, model); contentStreamer.streamContent(req, res, file, null, attach, fileName, model);
model = null; model = null;
} }
finally finally
{ {
if (file != null) if (file != null)
{ {
file.delete(); file.delete();
} }
} }
} }
return model; return model;
} }
/** /**
* Get export parameter from the request * Get export parameter from the request
* *
* @param req * @param req
* @return * @return
*/ */
protected boolean getExportParameter(WebScriptRequest req) protected boolean getExportParameter(WebScriptRequest req)
{ {
boolean attach = false; boolean attach = false;
String export = req.getParameter(PARAM_EXPORT); String export = req.getParameter(PARAM_EXPORT);
if (export != null && Boolean.parseBoolean(export)) if (export != null && Boolean.parseBoolean(export))
{ {
attach = true; attach = true;
} }
return attach; return attach;
} }
/* /*
* (non-Javadoc) * (non-Javadoc)
* @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts.
* WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse)
*/ */
@Override @Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException
{ {
// retrieve requested format // retrieve requested format
String format = req.getFormat(); String format = req.getFormat();
try try
{ {
String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format);
if (mimetype == null) if (mimetype == null)
{ {
throw new WebScriptException("Web Script format '" + format + "' is not registered"); throw new WebScriptException("Web Script format '" + format + "' is not registered");
} }
// construct model for script / template // construct model for script / template
Status status = new Status(); Status status = new Status();
Cache cache = new Cache(getDescription().getRequiredCache()); Cache cache = new Cache(getDescription().getRequiredCache());
Map<String, Object> model = buildModel(req, res); Map<String, Object> model = buildModel(req, res);
if (model == null) { return; } if (model == null) { return; }
model.put("status", status); model.put("status", status);
model.put("cache", cache); model.put("cache", cache);
Map<String, Object> templateModel = createTemplateParameters(req, res, model); Map<String, Object> templateModel = createTemplateParameters(req, res, model);
// render output // render output
int statusCode = status.getCode(); int statusCode = status.getCode();
if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus())
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
{ {
logger.debug("Force success status header in response: " + req.forceSuccessStatus()); logger.debug("Force success status header in response: " + req.forceSuccessStatus());
logger.debug("Setting status " + statusCode); logger.debug("Setting status " + statusCode);
} }
res.setStatus(statusCode); res.setStatus(statusCode);
} }
// apply location // apply location
String location = status.getLocation(); String location = status.getLocation();
if (location != null && location.length() > 0) if (location != null && location.length() > 0)
{ {
if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); if (logger.isDebugEnabled()) logger.debug("Setting location to " + location);
res.setHeader(WebScriptResponse.HEADER_LOCATION, location); res.setHeader(WebScriptResponse.HEADER_LOCATION, location);
} }
// apply cache // apply cache
res.setCache(cache); res.setCache(cache);
String callback = null; String callback = null;
if (getContainer().allowCallbacks()) if (getContainer().allowCallbacks())
{ {
callback = req.getJSONCallback(); callback = req.getJSONCallback();
} }
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{ {
if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type="
+ Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback);
// NOTE: special case for wrapping JSON results in a javascript function callback // NOTE: special case for wrapping JSON results in a javascript function callback
res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8");
res.getWriter().write((callback + "(")); res.getWriter().write((callback + "("));
} }
else else
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode);
res.setContentType(mimetype + ";charset=UTF-8"); res.setContentType(mimetype + ";charset=UTF-8");
} }
// render response according to requested format // render response according to requested format
renderFormatTemplate(format, templateModel, res.getWriter()); renderFormatTemplate(format, templateModel, res.getWriter());
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{ {
// NOTE: special case for wrapping JSON results in a javascript function callback // NOTE: special case for wrapping JSON results in a javascript function callback
res.getWriter().write(")"); res.getWriter().write(")");
} }
} }
catch (Throwable e) catch (Throwable e)
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
{ {
StringWriter stack = new StringWriter(); StringWriter stack = new StringWriter();
e.printStackTrace(new PrintWriter(stack)); e.printStackTrace(new PrintWriter(stack));
logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString());
} }
throw createStatusException(e, req, res); throw createStatusException(e, req, res);
} }
} }
protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer) protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer)
{ {
format = (format == null) ? "" : format; format = (format == null) ? "" : format;
String templatePath = getDescription().getId() + "." + format; String templatePath = getDescription().getId() + "." + format;
if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'");
renderTemplate(templatePath, model, writer); renderTemplate(templatePath, model, writer);
} }
/** /**
* Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise
* *
* @param req * @param req
* @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise
*/ */
protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize)
{ {
String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS);
//default total number of records to be processed to batch size value //default total number of records to be processed to batch size value
Long totalNumberOfRecordsToProcess = batchSize; Long totalNumberOfRecordsToProcess = batchSize;
if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr))
@@ -404,77 +404,77 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
//do nothing here, the value will remain 0L in this case //do nothing here, the value will remain 0L in this case
} }
} }
return totalNumberOfRecordsToProcess; return totalNumberOfRecordsToProcess;
} }
/**
* Obtain batchsize parameter from the request.
*
* @param req
* @return batchsize parameter from the request
*/
protected Long getBatchSizeParameter(WebScriptRequest req)
{
String batchSizeStr = req.getParameter(BATCH_SIZE);
Long size = 0L;
if (StringUtils.isBlank(batchSizeStr))
{
logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
}
try
{
size = Long.parseLong(batchSizeStr);
if (size <= 0)
{
logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
}
catch (NumberFormatException ex)
{
logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
}
return size;
}
/** /**
* Get parentNodeRef parameter from the request * Obtain batchsize parameter from the request.
* *
* @param req * @param req
* @return * @return batchsize parameter from the request
*/ */
protected NodeRef getParentNodeRefParameter(WebScriptRequest req) protected Long getBatchSizeParameter(WebScriptRequest req)
{ {
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); String batchSizeStr = req.getParameter(BATCH_SIZE);
NodeRef parentNodeRef = null; Long size = 0L;
if (StringUtils.isNotBlank(parentNodeRefStr)) if (StringUtils.isBlank(batchSizeStr))
{ {
parentNodeRef = new NodeRef(parentNodeRefStr); logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
if(!nodeService.exists(parentNodeRef)) throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
{ }
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); try
logger.info(message); {
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); size = Long.parseLong(batchSizeStr);
} if (size <= 0)
} {
return parentNodeRef; logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
} throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
/** }
* Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess catch (NumberFormatException ex)
* parameters {
* logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
* @param batchSize throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
* @param maxNodeId }
* @param recordAspectPair return size;
* @param totalNumberOfRecordsToProcess }
* @return the list of processed nodes
*/ /**
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair, * Get parentNodeRef parameter from the request
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) *
{ * @param req
* @return
*/
protected NodeRef getParentNodeRefParameter(WebScriptRequest req)
{
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF);
NodeRef parentNodeRef = null;
if (StringUtils.isNotBlank(parentNodeRefStr))
{
parentNodeRef = new NodeRef(parentNodeRefStr);
if(!nodeService.exists(parentNodeRef))
{
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString());
logger.info(message);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message);
}
}
return parentNodeRef;
}
/**
* Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess
* parameters
*
* @param batchSize
* @param maxNodeId
* @param recordAspectPair
* @param totalNumberOfRecordsToProcess
* @return the list of processed nodes
*/
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair,
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach)
{
final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final Long maxRecordsToProcess = totalNumberOfRecordsToProcess;
final List<NodeRef> processedNodes = new ArrayList<NodeRef>(); final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
logger.info(MESSAGE_PROCESSING_BEGIN); logger.info(MESSAGE_PROCESSING_BEGIN);
@@ -492,8 +492,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
public Void execute() throws Throwable public Void execute() throws Throwable
{ {
// get the nodes with the extended security aspect applied // get the nodes with the extended security aspect applied
List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex,
currentIndex + batchSize); currentIndex + batchSize);
// process each one // process each one
for (Long nodeId : nodeIds) for (Long nodeId : nodeIds)
@@ -508,79 +508,79 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
processNode(record); processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record); processedNodes.add(record);
if (attach) if (attach)
{ {
out.write(recordName); out.write(recordName);
out.write(","); out.write(",");
out.write(record.toString()); out.write(record.toString());
out.write("\n"); out.write("\n");
} }
} }
return null; return null;
} }
}, false, // read only }, false, // read only
true); // requires new true); // requires new
} }
logger.info(MESSAGE_PROCESSING_END); logger.info(MESSAGE_PROCESSING_END);
return processedNodes; return processedNodes;
}
protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
final boolean attach)
{
final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
logger.info(MESSAGE_PROCESSING_BEGIN);
// by batch size
for (int i = 0; i < children.size(); i += batchSize)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
final int currentIndex = i;
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
// process each one
for (FileInfo node : nodes)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
NodeRef record = node.getNodeRef();
if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
{
String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record);
if (attach)
{
out.write(recordName);
out.write(",");
out.write(record.toString());
out.write("\n");
}
}
}
return null;
}
}, false, // read only
true); // requires new
}
logger.info(MESSAGE_PROCESSING_END);
return processedNodes;
} }
/**
 * Walks all files and folders beneath {@code parentNodeRef} (recursively) and
 * processes, in batches of {@code batchSize}, every descendant node that carries
 * the record aspect identified by {@code recordAspectPair}. Each batch runs in
 * its own new read-write transaction via the retrying transaction helper.
 *
 * Note: the transaction callback returns the (node, name) pairs it processed
 * instead of mutating shared state directly. The retrying helper may re-execute
 * a callback after a transient (e.g. concurrency) failure; updating
 * {@code processedNodes} or writing CSV rows from inside the callback would then
 * record duplicates. Bookkeeping is therefore done once per committed batch.
 *
 * @param parentNodeRef       folder whose descendants are candidates for processing
 * @param batchSize           maximum number of nodes handled in one transaction (expected &gt; 0)
 * @param recordAspectPair    (QName id, QName) pair of the aspect that marks nodes to process
 * @param maxRecordsToProcess stop once this many nodes have been processed; 0 means no limit
 * @param out                 writer for the CSV export; only used when {@code attach} is true
 * @param attach              when true, a "name,nodeRef" line is written to {@code out} for each processed node
 * @return the nodes that were processed
 */
protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
        final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
        final boolean attach)
{
    final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
    // Recursive search for both files and folders under the parent.
    final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
    logger.info(MESSAGE_PROCESSING_BEGIN);
    // Walk the result set one batch at a time.
    for (int i = 0; i < children.size(); i += batchSize)
    {
        // Stop early once the overall processing cap has been reached (0 = unlimited).
        if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
        {
            break;
        }
        final int currentIndex = i;
        // Snapshot of the committed count, used by the callback's cap check so that a
        // retried callback starts from the same baseline every time.
        final int alreadyProcessed = processedNodes.size();
        List<Pair<NodeRef, String>> batchResults = transactionService.getRetryingTransactionHelper()
                .doInTransaction(new RetryingTransactionCallback<List<Pair<NodeRef, String>>>()
                {
                    public List<Pair<NodeRef, String>> execute() throws Throwable
                    {
                        List<Pair<NodeRef, String>> results = new ArrayList<Pair<NodeRef, String>>();
                        List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
                        // process each one
                        for (FileInfo node : nodes)
                        {
                            if (maxRecordsToProcess != 0 && alreadyProcessed + results.size() >= maxRecordsToProcess)
                            {
                                break;
                            }
                            NodeRef record = node.getNodeRef();
                            // Only nodes carrying the record aspect are processed.
                            if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
                            {
                                String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
                                logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
                                processNode(record);
                                logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
                                results.add(new Pair<NodeRef, String>(record, recordName));
                            }
                        }
                        return results;
                    }
                }, /*readOnly*/false, /*requiresNew*/true);
        // Bookkeeping happens exactly once per committed batch.
        for (Pair<NodeRef, String> result : batchResults)
        {
            processedNodes.add(result.getFirst());
        }
        if (attach)
        {
            try
            {
                for (Pair<NodeRef, String> result : batchResults)
                {
                    out.write(result.getSecond());
                    out.write(",");
                    out.write(result.getFirst().toString());
                    out.write("\n");
                }
            }
            catch (java.io.IOException e)
            {
                // Fully qualified to avoid depending on an explicit IOException import.
                throw new RuntimeException("Failed to write CSV export row", e);
            }
        }
    }
    logger.info(MESSAGE_PROCESSING_END);
    return processedNodes;
}
/** /**
* Process each node * Process each node
* *
@@ -601,20 +601,20 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER);
// if record then ... // if record then ...
if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) if (nodeService.hasAspect(nodeRef, ASPECT_RECORD))
{
Set<String> readersKeySet = null;
if (readers != null)
{ {
readersKeySet = readers.keySet(); Set<String> readersKeySet = null;
} if (readers != null)
Set<String> writersKeySet = null; {
if (writers != null) readersKeySet = readers.keySet();
{ }
writersKeySet = writers.keySet(); Set<String> writersKeySet = null;
} if (writers != null)
{
writersKeySet = writers.keySet();
}
// re-set extended security via API // re-set extended security via API
extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet);
} }
} }
} }

View File

@@ -46,7 +46,7 @@ import org.json.JSONObject;
import org.mockito.invocation.InvocationOnMock; import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer; import org.mockito.stubbing.Answer;
import org.springframework.extensions.surf.util.Content; import org.springframework.extensions.surf.util.Content;
import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Container; import org.springframework.extensions.webscripts.Container;
import org.springframework.extensions.webscripts.Description; import org.springframework.extensions.webscripts.Description;
import org.springframework.extensions.webscripts.Description.RequiredCache; import org.springframework.extensions.webscripts.Description.RequiredCache;
@@ -83,7 +83,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
/** /**
* @return declarative webscript * @return declarative webscript
*/ */
protected abstract AbstractWebScript getWebScript(); protected abstract AbstractWebScript getWebScript();
/** /**
* @return classpath location of webscript template * @return classpath location of webscript template
@@ -151,7 +151,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
*/ */
protected String executeWebScript(Map<String, String> parameters, String content) throws Exception protected String executeWebScript(Map<String, String> parameters, String content) throws Exception
{ {
AbstractWebScript webScript = getWebScript(); AbstractWebScript webScript = getWebScript();
String template = getWebScriptTemplate(); String template = getWebScriptTemplate();
// initialise webscript // initialise webscript
@@ -173,7 +173,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
* @return {@link WebScriptRequest} mocked web script request * @return {@link WebScriptRequest} mocked web script request
*/ */
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map<String, String> parameters, String content) throws Exception protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map<String, String> parameters, String content) throws Exception
{ {
Match match = new Match(null, parameters, null, webScript); Match match = new Match(null, parameters, null, webScript);
org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class); org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class);

View File

@@ -49,14 +49,14 @@ import static java.util.Collections.emptyMap;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq; import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never; import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times; import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
@@ -64,7 +64,7 @@ import static org.mockito.Mockito.when;
import java.io.File; import java.io.File;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -87,9 +87,9 @@ import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
@@ -105,11 +105,11 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations; import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock; import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer; import org.mockito.stubbing.Answer;
import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse; import org.springframework.extensions.webscripts.WebScriptResponse;
/** /**
* DynamicAuthoritiesGet Unit Test * DynamicAuthoritiesGet Unit Test
@@ -140,17 +140,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
private TransactionService mockedTransactionService; private TransactionService mockedTransactionService;
@Mock @Mock
private RetryingTransactionHelper mockedRetryingTransactionHelper; private RetryingTransactionHelper mockedRetryingTransactionHelper;
@Mock @Mock
private ContentStreamer contentStreamer; private ContentStreamer contentStreamer;
@Mock @Mock
private FileFolderService mockedFileFolderService; private FileFolderService mockedFileFolderService;
/** test component */ /** test component */
@InjectMocks @InjectMocks
private DynamicAuthoritiesGet webScript; private DynamicAuthoritiesGet webScript;
@Override @Override
protected AbstractWebScript getWebScript() protected AbstractWebScript getWebScript()
{ {
return webScript; return webScript;
} }
@@ -172,7 +172,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
webScript.setNodeService(mockedNodeService); webScript.setNodeService(mockedNodeService);
webScript.setPermissionService(mockedPermissionService); webScript.setPermissionService(mockedPermissionService);
webScript.setExtendedSecurityService(mockedExtendedSecurityService); webScript.setExtendedSecurityService(mockedExtendedSecurityService);
webScript.setFileFolderService(mockedFileFolderService); webScript.setFileFolderService(mockedFileFolderService);
// setup retrying transaction helper // setup retrying transaction helper
Answer<Object> doInTransactionAnswer = new Answer<Object>() Answer<Object> doInTransactionAnswer = new Answer<Object>()
{ {
@@ -200,7 +200,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
/** /**
* Given that there are no nodes with the extended security aspect * Given that there are no nodes with the extended security aspect
* When the action is executed Nothing happens * When the action is executed Nothing happens
* *
* @throws Exception * @throws Exception
*/ */
@SuppressWarnings({ "unchecked" }) @SuppressWarnings({ "unchecked" })
@@ -237,7 +237,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
* Then the aspect is removed * Then the aspect is removed
* And the dynamic authorities permissions are cleared * And the dynamic authorities permissions are cleared
* And extended security is set via the updated API * And extended security is set via the updated API
* *
* @throws Exception * @throws Exception
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@@ -246,7 +246,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
@@ -282,8 +282,8 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
/** /**
* Given that there are non-records with the extended security aspect * Given that there are non-records with the extended security aspect
* When the web script is executed * When the web script is executed
* Then the aspect is removed And the dynamic authorities permissions are cleared * Then the aspect is removed And the dynamic authorities permissions are cleared
* *
* @throws Exception * @throws Exception
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@@ -292,7 +292,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
@@ -328,50 +328,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
@Test @Test
public void missingBatchSizeParameter() throws Exception public void missingBatchSizeParameter() throws Exception
{ {
try try
{ {
executeJSONWebScript(emptyMap()); executeJSONWebScript(emptyMap());
fail("Expected exception as parameter batchsize is mandatory."); fail("Expected exception as parameter batchsize is mandatory.");
} }
catch (WebScriptException e) catch (WebScriptException e)
{ {
assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.", assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus()); Status.STATUS_BAD_REQUEST, e.getStatus());
} }
} }
@Test @Test
public void invalidBatchSizeParameter() throws Exception public void invalidBatchSizeParameter() throws Exception
{ {
try try
{ {
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "dd"); Map<String, String> parameters = ImmutableMap.of("batchsize", "dd");
executeJSONWebScript(parameters); executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is invalid."); fail("Expected exception as parameter batchsize is invalid.");
} }
catch (WebScriptException e) catch (WebScriptException e)
{ {
assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.", assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus()); Status.STATUS_BAD_REQUEST, e.getStatus());
} }
} }
@Test @Test
public void batchSizeShouldBeGraterThanZero() throws Exception public void batchSizeShouldBeGraterThanZero() throws Exception
{ {
try try
{ {
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "0"); Map<String, String> parameters = ImmutableMap.of("batchsize", "0");
executeJSONWebScript(parameters); executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is not a number greater than 0."); fail("Expected exception as parameter batchsize is not a number greater than 0.");
} }
catch (WebScriptException e) catch (WebScriptException e)
{ {
assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.", assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus()); Status.STATUS_BAD_REQUEST, e.getStatus());
} }
} }
@Test @Test
@@ -393,7 +393,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{ {
List<Long> ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
@@ -421,7 +421,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{ {
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
@@ -443,302 +443,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
} }
@SuppressWarnings({ "unchecked", "rawtypes" }) @SuppressWarnings({ "unchecked", "rawtypes" })
@Test @Test
public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef)); when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null); when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null);
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
}); });
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters); JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json); assertNotNull(json);
String actualJSONString = json.toString(); String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class); ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class); ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture()); writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues(); List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues(); List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets) for (Set keySet : allReaderKeySets)
{ {
assertNull(keySet); assertNull(keySet);
} }
for (Set keySet : allWritersKeySets) for (Set keySet : allWritersKeySets)
{ {
assertNull(keySet); assertNull(keySet);
} }
} }
@SuppressWarnings({ "unchecked", "rawtypes" }) @SuppressWarnings({ "unchecked", "rawtypes" })
@Test @Test
public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef)); when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null); when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
}); });
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4"); Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters); JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json); assertNotNull(json);
String actualJSONString = json.toString(); String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class); ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class); ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS)); verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY)); verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER)); eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class), verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER)); eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(), verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture()); writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues(); List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues(); List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets) for (Set keySet : allReaderKeySets)
{ {
assertNotNull(keySet); assertNotNull(keySet);
} }
for (Set keySet : allWritersKeySets) for (Set keySet : allWritersKeySets)
{ {
assertNull(keySet); assertNull(keySet);
} }
} }
/** /**
* Given I have records that require migration * Given I have records that require migration
     * And I am interested in knowing which records are migrated      * And I am interested in knowing which records are migrated
* When I run the migration tool * When I run the migration tool
* Then I will be returned a CSV file containing the name and node reference of the record migrated * Then I will be returned a CSV file containing the name and node reference of the record migrated
* *
* @throws Exception * @throws Exception
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void processWithCSVFile() throws Exception public void processWithCSVFile() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef)); when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i; String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
}); });
ArgumentCaptor<File> csvFileCaptor = ArgumentCaptor.forClass(File.class); ArgumentCaptor<File> csvFileCaptor = ArgumentCaptor.forClass(File.class);
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"true"); "true");
executeWebScript(parameters); executeWebScript(parameters);
verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
File fileForDownload = csvFileCaptor.getValue(); File fileForDownload = csvFileCaptor.getValue();
assertNotNull(fileForDownload); assertNotNull(fileForDownload);
} }
/** /**
     * Given that I have records that require migration      * Given that I have records that require migration
* And I'm not interested in knowing which records were migrated * And I'm not interested in knowing which records were migrated
* When I run the migration tool * When I run the migration tool
* Then I will not be returned a CSV file of details. * Then I will not be returned a CSV file of details.
* *
* @throws Exception * @throws Exception
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void processedWithouthCSVFile() throws Exception public void processedWithouthCSVFile() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids) when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList()); .thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef)); when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
}); });
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export", Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"false"); "false");
JSONObject json = executeJSONWebScript(parameters); JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json); assertNotNull(json);
String actualJSONString = json.toString(); String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
} }
@Test @Test
public void invalidParentNodeRefParameter() throws Exception public void invalidParentNodeRefParameter() throws Exception
{ {
try try
{ {
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef"); Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef");
executeJSONWebScript(parameters); executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef is invalid."); fail("Expected exception as parameter parentNodeRef is invalid.");
} }
catch (WebScriptException e) catch (WebScriptException e)
{ {
assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.", assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.",
Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus()); Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus());
} }
} }
@Test @Test
public void inexistentParentNodeRefParameter() throws Exception public void inexistentParentNodeRefParameter() throws Exception
{ {
try try
{ {
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.exists(parentNodeRef)).thenReturn(false); when(mockedNodeService.exists(parentNodeRef)).thenReturn(false);
// Set up parameters. // Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef",
parentNodeRef.toString()); parentNodeRef.toString());
executeJSONWebScript(parameters); executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef does not exist."); fail("Expected exception as parameter parentNodeRef does not exist.");
} }
catch (WebScriptException e) catch (WebScriptException e)
{ {
assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.", assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus()); Status.STATUS_BAD_REQUEST, e.getStatus());
} }
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void processedWithParentNodeRef() throws Exception public void processedWithParentNodeRef() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>(); List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i; String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class); FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo); children.add(mockedFileInfo);
}); });
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children); .thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export", Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export",
"false", "parentNodeRef", parentNodeRef.toString()); "false", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters); JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json); assertNotNull(json);
String actualJSONString = json.toString(); String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception
{ {
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList()); List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>(); List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> { ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService); NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
if (i <= 6l) if (i <= 6l)
{ {
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false); when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false);
} }
else else
{ {
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true); when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
} }
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)) when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)) when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap()); .thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i; String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name); when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class); FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef); when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo); children.add(mockedFileInfo);
}); });
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true))) when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children); .thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString()); Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters); JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json); assertNotNull(json);
String actualJSONString = json.toString(); String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}"; String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString)); assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class), verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class)); any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
} }
} }