Merge branch 'release/V3.4' into merge/MNT-22310

Epure Alexandru-Eusebiu
2021-04-15 10:30:05 +03:00
4 changed files with 109 additions and 82 deletions

View File

@@ -69,6 +69,11 @@ rm.autocompletesuggestion.nodeParameterSuggester.aspectsAndTypes=rma:record,cm:c
#
rm.dispositionlifecycletrigger.cronexpression=0 0/5 * * * ?
#
# Global RM retention lifecycle cron job execution batch size
#
rm.dispositionlifecycletrigger.batchsize=500
#
# Global RM notify of records due for review cron job expression
#

View File

@@ -80,6 +80,7 @@
<property name="searchService" ref="searchService" />
<property name="personService" ref="personService" />
<property name="recordsManagementActionService" ref="recordsManagementActionService" />
<property name="batchSize" value="${rm.dispositionlifecycletrigger.batchsize}"/>
</bean>
<bean id="scheduledDispositionLifecyceleSchedulerAccessor" class="org.alfresco.schedule.AlfrescoSchedulerAccessorBean">

View File

@@ -60,6 +60,9 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
/** logger */
private static Log logger = LogFactory.getLog(DispositionLifecycleJobExecuter.class);
/** batching properties */
private int batchSize;
/** list of disposition actions to automatically execute */
private List<String> dispositionActions;
@@ -88,6 +91,11 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
this.dispositionActions = dispositionActions;
}
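/**
 * @param batchSize maximum number of disposition actions fetched and processed per query page
 */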
public void setBatchSize(int batchSize)
{
this.batchSize = batchSize;
}
/**
* @param recordsManagementActionService records management action service
*/
@@ -167,13 +175,14 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
{
boolean hasMore = true;
int skipCount = 0;
while(hasMore)
while (hasMore)
{
SearchParameters params = new SearchParameters();
params.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
params.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
params.setQuery(getQuery());
params.setSkipCount(skipCount);
params.setMaxItems(batchSize);
// execute search
ResultSet results = searchService.query(params);
@@ -188,13 +197,12 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
}
// process search results
for (NodeRef node : resultNodes)
if (!resultNodes.isEmpty())
{
executeAction(node);
executeAction(resultNodes);
}
}
}
logger.debug("Job Finished");
}
catch (AlfrescoRuntimeException exception)
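Taken together, this hunk turns executeImpl() into a paged loop: each pass queries at most batchSize disposition actions and hands the whole page to executeAction(). A minimal sketch of that loop, assuming the class's existing searchService, getQuery() and batchSize members; the skipCount advance, hasMore update and result-set close are assumptions based on the surrounding context, not the committed code:

boolean hasMore = true;
int skipCount = 0;
while (hasMore)
{
    // page the query: at most batchSize disposition actions per iteration
    SearchParameters params = new SearchParameters();
    params.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    params.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
    params.setQuery(getQuery());
    params.setSkipCount(skipCount);
    params.setMaxItems(batchSize);
    ResultSet results = searchService.query(params);
    try
    {
        List<NodeRef> resultNodes = results.getNodeRefs();
        hasMore = results.hasMore();        // assumption: stop once the result set reports no further pages
        skipCount += resultNodes.size();    // assumption: advance the window by the nodes just read
        if (!resultNodes.isEmpty())
        {
            executeAction(resultNodes);     // process the whole page in one transactional batch
        }
    }
    finally
    {
        results.close();                    // always release the result set
    }
}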
@@ -209,57 +217,52 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
/**
* Helper method that executes a disposition action
*
* @param actionNode - the disposition action to execute
* @param actionNodes - the disposition actions to execute
*/
private void executeAction(final NodeRef actionNode)
private void executeAction(final List<NodeRef> actionNodes)
{
RetryingTransactionCallback<Boolean> processTranCB = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute()
RetryingTransactionCallback<Boolean> processTranCB = () -> {
for (NodeRef actionNode : actionNodes)
{
final String dispAction = (String) nodeService.getProperty(actionNode,
RecordsManagementModel.PROP_DISPOSITION_ACTION);
// Run disposition action
if (dispAction != null && dispositionActions.contains(dispAction))
if (nodeService.exists(actionNode))
{
ChildAssociationRef parent = nodeService.getPrimaryParent(actionNode);
if (parent.getTypeQName().equals(RecordsManagementModel.ASSOC_NEXT_DISPOSITION_ACTION))
final String dispAction = (String) nodeService
.getProperty(actionNode, RecordsManagementModel.PROP_DISPOSITION_ACTION);
// Run disposition action
if (dispAction != null && dispositionActions.contains(dispAction))
{
Map<String, Serializable> props = new HashMap<>(1);
props.put(RMDispositionActionExecuterAbstractBase.PARAM_NO_ERROR_CHECK,
Boolean.FALSE);
try
ChildAssociationRef parent = nodeService.getPrimaryParent(actionNode);
if (parent.getTypeQName().equals(RecordsManagementModel.ASSOC_NEXT_DISPOSITION_ACTION))
{
// execute disposition action
recordsManagementActionService.executeRecordsManagementAction(
parent.getParentRef(), dispAction, props);
Map<String, Serializable> props = new HashMap<>(1);
props.put(RMDispositionActionExecuterAbstractBase.PARAM_NO_ERROR_CHECK, Boolean.FALSE);
if (logger.isDebugEnabled())
try
{
logger.debug("Processed action: " + dispAction + "on" + parent);
// execute disposition action
recordsManagementActionService
.executeRecordsManagementAction(parent.getParentRef(), dispAction, props);
if (logger.isDebugEnabled())
{
logger.debug("Processed action: " + dispAction + "on" + parent);
}
}
}
catch (AlfrescoRuntimeException exception)
{
if (logger.isDebugEnabled())
catch (AlfrescoRuntimeException exception)
{
logger.debug(exception);
if (logger.isDebugEnabled())
{
logger.debug(exception);
}
}
}
}
}
return Boolean.TRUE;
}
return Boolean.TRUE;
};
// if exists
if (nodeService.exists(actionNode))
{
retryingTransactionHelper.doInTransaction(processTranCB);
}
retryingTransactionHelper.doInTransaction(processTranCB, false, true);
}
public PersonService getPersonService()
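The reworked executeAction(List&lt;NodeRef&gt;) above processes a whole page per transaction. For reference, the call at the end uses the standard Alfresco RetryingTransactionHelper overload doInTransaction(callback, readOnly, requiresNew); a minimal reading of the flags, assuming that signature:

// readOnly = false: the disposition actions modify nodes
// requiresNew = true: run the page in its own fresh transaction
retryingTransactionHelper.doInTransaction(processTranCB, false, true);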

View File

@@ -33,6 +33,7 @@ import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@@ -56,8 +57,8 @@ import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
@@ -73,7 +74,7 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
private static final String RETAIN = "retain";
private static final String DESTROY = "destroy";
/** test query snipit */
/** test query snippet */
private static final String QUERY = "\"" + CUTOFF + "\" OR \"" + RETAIN + "\"";
/** mocked result set */
@@ -91,9 +92,18 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
{
super.before();
// Because of the fix implemented in MNT-22310, a new setup for the retrying transaction helper is required.
Answer<Object> doInTransactionAnswer = invocation -> {
RetryingTransactionCallback callback = (RetryingTransactionCallback)invocation.getArguments()[0];
return callback.execute();
};
doAnswer(doInTransactionAnswer).when(mockedRetryingTransactionHelper).doInTransaction(any(RetryingTransactionCallback.class),
Matchers.anyBoolean(), Matchers.anyBoolean());
// setup data
List<String> dispositionActions = buildList(CUTOFF, RETAIN);
executer.setDispositionActions(dispositionActions);
executer.setBatchSize(1);
// setup interactions
doReturn(mockedResultSet).when(mockedSearchService).query(any(SearchParameters.class));
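The doAnswer stub above is what lets the batched job logic run inside a plain unit test: the mocked RetryingTransactionHelper unwraps the callback and executes it inline instead of opening a transaction. A self-contained sketch of that stubbing pattern (mock and variable names here are illustrative, not the committed test code):

// Sketch only: make the mocked helper execute any supplied callback inline,
// so code wrapped in doInTransaction(callback, readOnly, requiresNew) still runs under test.
RetryingTransactionHelper mockedHelper = mock(RetryingTransactionHelper.class);
doAnswer(invocation -> {
    RetryingTransactionCallback<?> callback = (RetryingTransactionCallback<?>) invocation.getArguments()[0];
    return callback.execute();
}).when(mockedHelper).doInTransaction(any(RetryingTransactionCallback.class), Matchers.anyBoolean(), Matchers.anyBoolean());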
@@ -102,14 +112,15 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
/**
* Helper method to verify that the query has been executed and closed
* @param numberOfInvocation the number of times the query is expected to have been executed and closed
*/
private void verifyQuery()
private void verifyQueryTimes(int numberOfInvocation)
{
ArgumentCaptor<SearchParameters> paramsCaptor = ArgumentCaptor.forClass(SearchParameters.class);
verify(mockedSearchService, times(1)).query(paramsCaptor.capture());
verify(mockedSearchService, times(numberOfInvocation)).query(paramsCaptor.capture());
assertTrue(paramsCaptor.getValue().getQuery().contains(QUERY));
verify(mockedResultSet, times(1)).getNodeRefs();
verify(mockedResultSet, times(1)).close();
verify(mockedResultSet, times(numberOfInvocation)).getNodeRefs();
verify(mockedResultSet, times(numberOfInvocation)).close();
}
/**
@@ -127,7 +138,7 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
// then
// ensure the query is executed and closed
verifyQuery();
verifyQueryTimes(1);
// ensure nothing else happens because we have no results
verifyZeroInteractions(mockedNodeService, mockedRecordFolderService, mockedRetryingTransactionHelper);
@@ -143,24 +154,31 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
// test data
NodeRef node1 = generateNodeRef();
NodeRef node2 = generateNodeRef();
List<NodeRef> nodeRefs = buildList(node1, node2);
// given
doReturn(nodeRefs).when(mockedResultSet).getNodeRefs();
doReturn(DESTROY).when(mockedNodeService).getProperty(node1, RecordsManagementModel.PROP_DISPOSITION_ACTION);
doReturn(DESTROY).when(mockedNodeService).getProperty(node2, RecordsManagementModel.PROP_DISPOSITION_ACTION);
when(mockedResultSet.getNodeRefs())
.thenReturn(buildList(node1))
.thenReturn(buildList(node2));
when(mockedResultSet.hasMore())
.thenReturn(true)
.thenReturn(false);
// when
executer.executeImpl();
// then
// ensure the query is executed and closed
verifyQuery();
verifyQueryTimes(2);
// ensure work is executed in transaction for each node processed
verify(mockedNodeService, times(2)).exists(any(NodeRef.class));
verify(mockedRetryingTransactionHelper, times(2)).<Object>doInTransaction(any(RetryingTransactionCallback.class));
verify(mockedRetryingTransactionHelper, times(2)).doInTransaction(any(RetryingTransactionCallback.class),
Matchers.anyBoolean(), Matchers.anyBoolean());
// ensure each node is processed correctly
verify(mockedNodeService, times(1)).getProperty(node1, RecordsManagementModel.PROP_DISPOSITION_ACTION);
@@ -191,14 +209,14 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
// then
// ensure the query is executed and closed
verifyQuery();
verifyQueryTimes(1);
// ensure the node existence check is made for the node
verify(mockedNodeService, times(1)).exists(any(NodeRef.class));
// ensure no more interactions
verifyNoMoreInteractions(mockedNodeService);
verifyZeroInteractions(mockedRecordsManagementActionService, mockedRetryingTransactionHelper);
verifyZeroInteractions(mockedRecordsManagementActionService);
}
/**
@@ -211,27 +229,33 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
// test data
NodeRef node1 = generateNodeRef();
NodeRef node2 = generateNodeRef();
List<NodeRef> nodeRefs = buildList(node1, node2);
NodeRef parent = generateNodeRef();
ChildAssociationRef parentAssoc = new ChildAssociationRef(ASSOC_NEXT_DISPOSITION_ACTION, parent, generateQName(), generateNodeRef());
// given
doReturn(nodeRefs).when(mockedResultSet).getNodeRefs();
doReturn(CUTOFF).when(mockedNodeService).getProperty(node1, RecordsManagementModel.PROP_DISPOSITION_ACTION);
doReturn(RETAIN).when(mockedNodeService).getProperty(node2, RecordsManagementModel.PROP_DISPOSITION_ACTION);
doReturn(parentAssoc).when(mockedNodeService).getPrimaryParent(any(NodeRef.class));
when(mockedResultSet.getNodeRefs())
.thenReturn(buildList(node1))
.thenReturn(buildList(node2));
when(mockedResultSet.hasMore())
.thenReturn(true)
.thenReturn(false);
// when
executer.executeImpl();
// then
// ensure the query is executed and closed
verifyQuery();
verifyQueryTimes(2);
// ensure work is executed in transaction for each node processed
verify(mockedNodeService, times(2)).exists(any(NodeRef.class));
verify(mockedRetryingTransactionHelper, times(2)).<Object>doInTransaction(any(RetryingTransactionCallback.class));
verify(mockedRetryingTransactionHelper, times(2)).doInTransaction(any(RetryingTransactionCallback.class),
Matchers.anyBoolean(), Matchers.anyBoolean());
// ensure each node is processed correctly
// node1
@@ -279,32 +303,26 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
final NodeRef node4 = generateNodeRef();
// mock the search service to return the right page
when(mockedSearchService.query(any(SearchParameters.class))).thenAnswer(
new Answer<ResultSet>()
when(mockedSearchService.query(any(SearchParameters.class))).thenAnswer((Answer<ResultSet>) invocation -> {
SearchParameters params = invocation.getArgumentAt(0, SearchParameters.class);
if (params.getSkipCount() == 0)
{
@Override
public ResultSet answer(InvocationOnMock invocation)
{
SearchParameters params = invocation.getArgumentAt(0, SearchParameters.class);
if (params.getSkipCount() == 0)
{
// mock first page
ResultSet result1 = mock(ResultSet.class);
when(result1.getNodeRefs()).thenReturn(Arrays.asList(node1, node2));
when(result1.hasMore()).thenReturn(true);
return result1;
}
else if (params.getSkipCount() == 2)
{
// mock second page
ResultSet result2 = mock(ResultSet.class);
when(result2.getNodeRefs()).thenReturn(Arrays.asList(node3, node4));
when(result2.hasMore()).thenReturn(false);
return result2;
}
throw new IndexOutOfBoundsException("Pagination did not stop after the second page!");
}
});
// mock first page
ResultSet result1 = mock(ResultSet.class);
when(result1.getNodeRefs()).thenReturn(Arrays.asList(node1, node2));
when(result1.hasMore()).thenReturn(true);
return result1;
}
else if (params.getSkipCount() == 2)
{
// mock second page
ResultSet result2 = mock(ResultSet.class);
when(result2.getNodeRefs()).thenReturn(Arrays.asList(node3, node4));
when(result2.hasMore()).thenReturn(false);
return result2;
}
throw new IndexOutOfBoundsException("Pagination did not stop after the second page!");
});
// call the service
executer.executeImpl();