Merge branch 'release/V2.3' into merge-2.4/MergeV23ToV24_101016_2

# Conflicts:
#	rm-community/rm-community-repo/config/alfresco/module/org_alfresco_module_rm/rm-action-context.xml
#	rm-community/rm-community-repo/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java
#	rm-community/rm-community-repo/unit-test/java/org/alfresco/module/org_alfresco_module_rm/test/util/BaseWebScriptUnitTest.java
#	rm-community/rm-community-share/config/alfresco/site-webscripts/org/alfresco/rm/components/audit/audit.get.html.ftl
#	rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java
#	rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java
#	rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java
#	rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/record/RecordTestSuite.java
#	rm-server/unit-test/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGetUnitTest.java
Tuna Aksoy
2016-10-10 20:49:13 +01:00
18 changed files with 1307 additions and 685 deletions

View File

@@ -0,0 +1,103 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.action.impl;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.PROP_DISPOSITION_AS_OF;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionActionDefinition;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link BroadcastDispositionActionDefinitionUpdateAction}.
*
* @author Tom Page
* @since 2.3.1
*/
public class BroadcastDispositionActionDefinitionUpdateActionUnitTest
{
/** The node under the category containing information about the definition of the action. */
private static final NodeRef DISPOSITION_ACTION_DEF_NODE = new NodeRef("disposition://Action/Def");
/** The node containing the details of the next disposition step for the content. */
private static final NodeRef NEXT_ACTION_NODE_REF = new NodeRef("next://Step/");
/** The node being subject to the disposition step. */
private static final NodeRef CONTENT_NODE_REF = new NodeRef("content://Node/Ref");
/** The class under test. */
private BroadcastDispositionActionDefinitionUpdateAction action = new BroadcastDispositionActionDefinitionUpdateAction();
private NodeService mockNodeService = mock(NodeService.class);
private DispositionService mockDispositionService = mock(DispositionService.class);
/** Inject the mock services into the class under test and link the content and next action nodes. */
@Before
public void setUp()
{
action.setNodeService(mockNodeService);
action.setDispositionService(mockDispositionService);
ChildAssociationRef mockAssocRef = mock(ChildAssociationRef.class);
when(mockNodeService.getPrimaryParent(NEXT_ACTION_NODE_REF)).thenReturn(mockAssocRef);
when(mockAssocRef.getParentRef()).thenReturn(CONTENT_NODE_REF);
}
/**
* Check that the disposition service is used to determine the "disposition as of" date when changes are made to the
* disposition period.
*/
@Test
public void testPersistPeriodChanges()
{
// Set up the data associated with the next disposition action.
DispositionAction mockAction = mock(DispositionAction.class);
when(mockAction.getNodeRef()).thenReturn(NEXT_ACTION_NODE_REF);
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
when(mockAction.getDispositionActionDefinition()).thenReturn(mockDispositionActionDefinition);
when(mockAction.getName()).thenReturn("mockAction");
// Set up the disposition service to return a known "disposition as of" date.
Date asOfDate = new Date();
when(mockDispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, false))
.thenReturn(asOfDate);
// Call the method under test.
action.persistPeriodChanges(DISPOSITION_ACTION_DEF_NODE, mockAction);
// Check that the "disposition as of" date has been set on the next action.
verify(mockNodeService).setProperty(NEXT_ACTION_NODE_REF, PROP_DISPOSITION_AS_OF, asOfDate);
}
}
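For orientation, the test above pins down a simple contract for persistPeriodChanges: resolve the content node from the next disposition action, ask the DispositionService to recalculate the "as of" date against the updated definition, and persist it as PROP_DISPOSITION_AS_OF. The actual implementation is not part of this diff, so the sketch below is an assumption reconstructed from the mocks and the verify call; the field names (nodeService, dispositionService) are hypothetical.

// Hedged sketch only; not the shipped BroadcastDispositionActionDefinitionUpdateAction source.
protected void persistPeriodChanges(NodeRef dispositionActionDefNode, DispositionAction nextAction)
{
    // The next disposition action node sits under the content node being dispositioned.
    NodeRef contentNode = nodeService.getPrimaryParent(nextAction.getNodeRef()).getParentRef();
    // Recalculate the "disposition as of" date against the (possibly changed) definition.
    Date newAsOfDate = dispositionService.calculateAsOfDate(
            contentNode, nextAction.getDispositionActionDefinition(), false);
    // Persist the recalculated date on the next disposition action, as the test verifies.
    nodeService.setProperty(nextAction.getNodeRef(), PROP_DISPOSITION_AS_OF, newAsOfDate);
}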

View File

@@ -0,0 +1,102 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.disposition;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Date;
import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Period;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link DispositionServiceImpl}.
*
* @author Tom Page
* @since 2.3.1
*/
public class DispositionServiceImplUnitTest
{
/** The node being subject to the disposition step. */
NodeRef CONTENT_NODE_REF = new NodeRef("content://node/");
/** The class under test. */
private DispositionServiceImpl dispositionService = new DispositionServiceImpl();
private NodeService mockNodeService = mock(NodeService.class);
@Before
public void setUp()
{
dispositionService.setNodeService(mockNodeService);
}
/**
* Check that the relevant information is retrieved from the DispositionActionDefinition in order to determine the
* "disposition as of" date.
*/
@Test
public void testCalculateAsOfDate()
{
// Set up a mock for the disposition action definition.
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
Period mockPeriod = mock(Period.class);
when(mockDispositionActionDefinition.getPeriod()).thenReturn(mockPeriod);
when(mockDispositionActionDefinition.getPeriodProperty()).thenReturn(ContentModel.PROP_CREATED);
// Set up a created date and another date that is some Period later.
Date createdDate = new Date(1234567890);
when(mockNodeService.getProperty(CONTENT_NODE_REF, ContentModel.PROP_CREATED)).thenReturn(createdDate);
Date nextDate = new Date(1240000000);
when(mockPeriod.getNextDate(createdDate)).thenReturn(nextDate);
// Call the method under test.
Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true);
assertEquals("Unexpected calculation for 'as of' date", nextDate, asOfDate);
}
/** Check that the calculated "disposition as of" date is null if a null period is given. */
@Test
public void testCalculateAsOfDate_nullPeriod()
{
DispositionActionDefinition mockDispositionActionDefinition = mock(DispositionActionDefinition.class);
when(mockDispositionActionDefinition.getPeriod()).thenReturn(null);
// Call the method under test.
Date asOfDate = dispositionService.calculateAsOfDate(CONTENT_NODE_REF, mockDispositionActionDefinition, true);
assertNull("It should not be possible to determine the 'as of' date.", asOfDate);
}
}
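Taken together, the two tests describe the calculation: read the period and the period property from the DispositionActionDefinition, resolve that property from the node, and return Period.getNextDate of it; with no period configured the result is null. The real DispositionServiceImpl method is not shown in this diff, so the sketch below is an assumption, and the third parameter name (allowContextFromAsOf) is hypothetical.

// Hedged sketch only; reconstructed from the assertions above, not the shipped DispositionServiceImpl source.
public Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition definition, boolean allowContextFromAsOf)
{
    Period period = definition.getPeriod();
    if (period == null)
    {
        // No period configured, so no "as of" date can be determined.
        return null;
    }
    // Resolve the date the period counts from, e.g. cm:created in testCalculateAsOfDate.
    Date fromDate = (Date) nodeService.getProperty(nodeRef, definition.getPeriodProperty());
    return period.getNextDate(fromDate);
}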

View File

@@ -46,7 +46,7 @@ import org.json.JSONObject;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.extensions.surf.util.Content;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Container;
import org.springframework.extensions.webscripts.Description;
import org.springframework.extensions.webscripts.Description.RequiredCache;
@@ -83,7 +83,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
/**
* @return declarative webscript
*/
protected abstract AbstractWebScript getWebScript();
/**
* @return classpath location of webscript template
@@ -151,7 +151,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
*/
protected String executeWebScript(Map<String, String> parameters, String content) throws Exception
{
AbstractWebScript webScript = getWebScript();
String template = getWebScriptTemplate();
// initialise webscript
@@ -173,7 +173,7 @@ public abstract class BaseWebScriptUnitTest extends BaseUnitTest
* @return {@link WebScriptRequest} mocked web script request
*/
@SuppressWarnings("rawtypes")
protected WebScriptRequest getMockedWebScriptRequest(AbstractWebScript webScript, final Map<String, String> parameters, String content) throws Exception
{
Match match = new Match(null, parameters, null, webScript);
org.springframework.extensions.webscripts.Runtime mockedRuntime = mock(org.springframework.extensions.webscripts.Runtime.class);

View File

@@ -24,24 +24,6 @@
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.web.scripts.roles;
@@ -49,14 +31,14 @@ import static java.util.Collections.emptyMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@@ -64,7 +46,7 @@ import static org.mockito.Mockito.when;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -87,9 +69,9 @@ import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService;
@@ -105,11 +87,11 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
/**
* DynamicAuthoritiesGet Unit Test
@@ -140,17 +122,17 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
private TransactionService mockedTransactionService;
@Mock
private RetryingTransactionHelper mockedRetryingTransactionHelper;
@Mock
private ContentStreamer contentStreamer;
@Mock
private FileFolderService mockedFileFolderService;
/** test component */
@InjectMocks
private DynamicAuthoritiesGet webScript;
@Override
protected AbstractWebScript getWebScript()
{
return webScript;
}
@@ -172,7 +154,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
webScript.setNodeService(mockedNodeService);
webScript.setPermissionService(mockedPermissionService);
webScript.setExtendedSecurityService(mockedExtendedSecurityService);
webScript.setFileFolderService(mockedFileFolderService);
// setup retrying transaction helper
Answer<Object> doInTransactionAnswer = new Answer<Object>()
{
@@ -200,7 +182,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
/**
* Given that there are no nodes with the extended security aspect
* When the action is executed, nothing happens
*
* @throws Exception
*/
@SuppressWarnings({ "unchecked" })
@@ -237,7 +219,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
* Then the aspect is removed
* And the dynamic authorities permissions are cleared
* And extended security is set via the updated API
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@@ -246,7 +228,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -282,8 +264,8 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
/**
* Given that there are non-records with the extended security aspect
* When the web script is executed
* Then the aspect is removed And the dynamic authorities permissions are cleared
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@@ -292,7 +274,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -328,50 +310,50 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
@Test
public void missingBatchSizeParameter() throws Exception
{
try
{
executeJSONWebScript(emptyMap());
fail("Expected exception as parameter batchsize is mandatory.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is not provided then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
public void invalidBatchSizeParameter() throws Exception
{
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "dd");
executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is invalid.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is invalid then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
public void batchSizeShouldBeGraterThanZero() throws Exception
{
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "0");
executeJSONWebScript(parameters);
fail("Expected exception as parameter batchsize is not a number greater than 0.");
}
catch (WebScriptException e)
{
assertEquals("If parameter batchsize is not a number greater than 0 then 'Bad request' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@Test
@@ -393,7 +375,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l,4l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -421,7 +403,7 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
{
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
@@ -443,302 +425,302 @@ public class DynamicAuthoritiesGetUnitTest extends BaseWebScriptUnitTest impleme
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed first 4 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void recordsWithExtendedSecurityAspectAndNullWritersAndReaders() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS)).thenReturn(null);
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
});
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets)
{
assertNull(keySet);
}
for (Set keySet : allWritersKeySets)
{
assertNull(keySet);
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void recordsWithExtendedSecurityAspectAndNullWriters() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS)).thenReturn(null);
});
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
ArgumentCaptor<Set> readerKeysCaptor = ArgumentCaptor.forClass(Set.class);
ArgumentCaptor<Set> writersKeysCaptor = ArgumentCaptor.forClass(Set.class);
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_READERS));
verify(mockedNodeService, times(3)).getProperty(any(NodeRef.class), eq(PROP_WRITERS));
verify(mockedNodeService, times(3)).removeAspect(any(NodeRef.class), eq(ASPECT_EXTENDED_SECURITY));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedReaderDynamicAuthority.EXTENDED_READER));
verify(mockedPermissionService, times(3)).clearPermission(any(NodeRef.class),
eq(ExtendedWriterDynamicAuthority.EXTENDED_WRITER));
verify(mockedExtendedSecurityService, times(3)).set(any(NodeRef.class), readerKeysCaptor.capture(),
writersKeysCaptor.capture());
List<Set> allReaderKeySets = readerKeysCaptor.getAllValues();
List<Set> allWritersKeySets = writersKeysCaptor.getAllValues();
for (Set keySet : allReaderKeySets)
{
assertNotNull(keySet);
}
for (Set keySet : allWritersKeySets)
{
assertNull(keySet);
}
}
/**
* Given I have records that require migration
* And I am interested in knowing which records are migrated
* When I run the migration tool
* Then I will be returned a CSV file containing the name and node reference of the record migrated
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@Test
public void processWithCSVFile() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
});
ArgumentCaptor<File> csvFileCaptor = ArgumentCaptor.forClass(File.class);
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"true");
executeWebScript(parameters);
verify(contentStreamer, times(1)).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
csvFileCaptor.capture(), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
File fileForDownload = csvFileCaptor.getValue();
assertNotNull(fileForDownload);
}
/**
* Given that I have records that require migration
* And I'm not interested in knowing which records were migrated
* When I run the migration tool
* Then I will not be returned a CSV file of details.
*
* @throws Exception
*/
@SuppressWarnings("unchecked")
@Test
public void processedWithouthCSVFile() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
when(mockedPatchDAO.getNodesByAspectQNameId(eq(ASPECT_ID), anyLong(), anyLong())).thenReturn(ids)
.thenReturn(Collections.emptyList());
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeDAO.getNodePair(i)).thenReturn(new Pair<Long, NodeRef>(i, nodeRef));
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
});
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "maxProcessedRecords", "4", "export",
"false");
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
@Test
public void invalidParentNodeRefParameter() throws Exception
{
try
{
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef", "invalidNodeRef");
executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef is invalid.");
}
catch (WebScriptException e)
{
assertEquals("If parameter parentNodeRef is invalid then 'Internal server error' should be returned.",
Status.STATUS_INTERNAL_SERVER_ERROR, e.getStatus());
}
}
@Test
public void inexistentParentNodeRefParameter() throws Exception
{
try
{
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.exists(parentNodeRef)).thenReturn(false);
// Set up parameters.
Map<String, String> parameters = ImmutableMap.of("batchsize", "10", "parentNodeRef",
parentNodeRef.toString());
executeJSONWebScript(parameters);
fail("Expected exception as parameter parentNodeRef does not exist.");
}
catch (WebScriptException e)
{
assertEquals("If parameter parentNodeRef is does not exist then 'Bad Reequest' should be returned.",
Status.STATUS_BAD_REQUEST, e.getStatus());
}
}
@SuppressWarnings("unchecked")
@Test
public void processedWithParentNodeRef() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo);
});
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "maxProcessedRecords", "4", "export",
"false", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 3 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
@SuppressWarnings("unchecked")
@Test
public void processedWithParentNodeRefWithFirstTwoBatchesAlreadyProcessed() throws Exception
{
List<Long> ids = Stream.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l).collect(Collectors.toList());
NodeRef parentNodeRef = AlfMock.generateNodeRef(mockedNodeService);
List<FileInfo> children = new ArrayList<FileInfo>();
ids.stream().forEach((i) -> {
NodeRef nodeRef = AlfMock.generateNodeRef(mockedNodeService);
when(mockedNodeService.hasAspect(nodeRef, ASPECT_RECORD)).thenReturn(true);
if (i <= 6l)
{
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(false);
}
else
{
when(mockedNodeService.hasAspect(nodeRef, ASPECT)).thenReturn(true);
}
when(mockedNodeService.getProperty(nodeRef, PROP_READERS))
.thenReturn((Serializable) Collections.emptyMap());
when(mockedNodeService.getProperty(nodeRef, PROP_WRITERS))
.thenReturn((Serializable) Collections.emptyMap());
String name = "name" + i;
when(mockedNodeService.getProperty(nodeRef, ContentModel.PROP_NAME)).thenReturn((Serializable) name);
FileInfo mockedFileInfo = mock(FileInfo.class);
when(mockedFileInfo.getNodeRef()).thenReturn(nodeRef);
children.add(mockedFileInfo);
});
when(mockedFileFolderService.search(eq(parentNodeRef), eq("*"), eq(true), eq(true), eq(true)))
.thenReturn(children);
Map<String, String> parameters = ImmutableMap.of("batchsize", "3", "parentNodeRef", parentNodeRef.toString());
JSONObject json = executeJSONWebScript(parameters);
assertNotNull(json);
String actualJSONString = json.toString();
ObjectMapper mapper = new ObjectMapper();
String expectedJSONString = "{\"responsestatus\":\"success\",\"message\":\"Processed 2 records.\"}";
assertEquals(mapper.readTree(expectedJSONString), mapper.readTree(actualJSONString));
verify(contentStreamer, never()).streamContent(any(WebScriptRequest.class), any(WebScriptResponse.class),
any(File.class), any(Long.class), any(Boolean.class), any(String.class), any(Map.class));
}
}