diff --git a/pom.xml b/pom.xml index e5f5f35ebe..70593388a6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.alfresco alfresco-rm-parent pom - 2.3.1-SNAPSHOT + 2.3.2-SNAPSHOT Alfresco Records Management http://www.alfresco.org/ @@ -60,9 +60,27 @@ + + + + org.alfresco + alfresco-platform-distribution + 5.0.2 + pom + import + + + org.reflections + reflections + 0.9.10 + + + + rm-server rm-share + rm-automation diff --git a/rm-automation/pom.xml b/rm-automation/pom.xml index 40d6f5cf74..02900fdb16 100644 --- a/rm-automation/pom.xml +++ b/rm-automation/pom.xml @@ -1,312 +1,360 @@ - - org.alfresco - alfresco-rm-parent - 2.3.1-SNAPSHOT - - 4.0.0 - alfresco-rm-automation - - 2.43.1 - 4.0.5.RELEASE - - - - + + 4.0.0 + alfresco-rm-automation + Alfresco Records Management Automation + + + org.alfresco + alfresco-rm-parent + 2.3.2-SNAPSHOT + + + + 2.45.0 + 4.0.5.RELEASE + 1.8 + testng.xml + true + + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + add-test-source + + add-test-source + + + + src/unit-test/java + + + + + + + maven-surefire-plugin + + false + + ${project.build.testOutputDirectory}/${suiteXmlFile} + + ${skip.automationtests} + + + + maven-antrun-plugin + + + default-cli + + + Stopping Alfresco... + + + + + + + + + + org.codehaus.mojo + license-maven-plugin + + alfresco_enterprise + file:${project.parent.basedir}/license + ${project.parent.basedir}/license/description.ftl + + + + + - org.codehaus.mojo - build-helper-maven-plugin - - - add-test-source + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.codehaus.mojo + license-maven-plugin + [1.8,) + + update-file-header + + + + + + + + + + + + + + + + + org.alfresco.test + dataprep + 1.8 + + + org.alfresco.test + alfresco-testng + 1.1 + + + org.alfresco + selenium-grid + 1.8 + + + org.springframework + spring-beans + ${spring.version} + + + org.springframework + spring-core + ${spring.version} + + + org.springframework + spring-context + ${spring.version} + + + org.springframework + spring-tx + ${spring.version} + test + + + org.springframework + spring-test + ${spring.version} + test + + + org.testng + testng + 6.8.8 + + + ru.yandex.qatools.htmlelements + htmlelements-all + 1.15 + + + ru.yandex.qatools.properties + properties-loader + 1.5 + test + + + com.github.tomakehurst + wiremock + 1.56 + + + org.mockito + mockito-all + test + + + org.slf4j + slf4j-log4j12 + test + + + org.slf4j + jul-to-slf4j + 1.7.21 + test + + + + + + install-alfresco + + + + maven-antrun-plugin + + + fetch-installer + generate-test-resources - add-test-source + run - - src/unit-test/java - + + Recreating database... + drop database if exists alfresco; create database alfresco + Downloading Alfresco installer... + + + Installing Alfresco... + + + + - - - - - maven-surefire-plugin - - false - - - usedefaultlisteners - false - - - listener - org.uncommons.reportng.HTMLReporter, org.uncommons.reportng.JUnitXMLReporter - - - - ${project.build.testOutputDirectory}/testng.xml - - - - - - maven-antrun-plugin - - - default-cli + + + + + org.apache.ant + ant-jsch + 1.8.2 + + + postgresql + postgresql + 9.1-901-1.jdbc4 + + + + + maven-dependency-plugin + + + fetch-amps + process-test-resources + + copy + - - Stopping Alfresco... 
- - - - + + + org.alfresco + alfresco-rm-share + ${project.version} + amp + amp + + + org.alfresco + alfresco-rm-server + ${project.version} + amp + amp + + + ${project.build.directory}/amps + true - - - - - - - - org.alfresco - webdrone - 2.6.1 - - - org.seleniumhq.selenium - selenium-java - ${selenium.version} - - - org.seleniumhq.selenium - selenium-server - ${selenium.version} - test - - - org.springframework - spring-beans - ${spring.version} - - - org.springframework - spring-context - ${spring.version} - - - org.springframework - spring-tx - ${spring.version} - test - - - org.springframework - spring-test - ${spring.version} - test - - - org.testng - testng - 6.8.8 - test - - - org.uncommons - reportng - 1.1.4 - test - - - ru.yandex.qatools.htmlelements - htmlelements-all - 1.12 - - - ru.yandex.qatools.properties - properties-loader - 1.5 - test - - - - - install-alfresco - - - - - maven-antrun-plugin - - - fetch-installer - generate-test-resources - - run - - - - Recreating database... - drop database if exists alfresco; create database alfresco - Downloading Alfresco installer... - - - - Installing Alfresco... - - - - - - - - - - org.apache.ant - ant-jsch - 1.8.2 - - - postgresql - postgresql - 9.1-901-1.jdbc4 - - - - - maven-dependency-plugin - - - fetch-amps - process-test-resources - - copy - - - - - org.alfresco - alfresco-rm-share - ${project.version} - amp - amp - - - org.alfresco - alfresco-rm-server - ${project.version} - amp - amp - - - ${project.build.directory}/amps - true - - - - - - org.alfresco.maven.plugin - alfresco-maven-plugin - true - - - install-server-amp - - install - - process-test-resources - - true - ${project.build.directory}/amps/alfresco-rm-server-${project.version}-amp.amp - ${project.build.directory}/alf-installation/tomcat/webapps/alfresco.war - amp - - - - install-share-amp - - install - - process-test-resources - - true - ${project.build.directory}/amps/alfresco-rm-share-${project.version}-amp.amp - ${project.build.directory}/alf-installation/tomcat/webapps/share.war - amp - - - - - - - - - run-alfresco - - - - - org.jacoco - jacoco-maven-plugin - 0.6.3.201306030806 - - - prepare-jacoco - - prepare-agent - - - + + + + + org.alfresco.maven.plugin + alfresco-maven-plugin + true + + + install-server-amp + + install + + process-test-resources - - org.alfresco.* - + true + ${project.build.directory}/amps/alfresco-rm-server-${project.version}-amp.amp + ${project.build.directory}/alf-installation/tomcat/webapps/alfresco.war - - - - maven-antrun-plugin - - - start-alfresco - process-test-classes - - run - - - - Starting Alfresco... - - - - - - - - - - stop-alfresco - post-integration-test - - run - - - - Stopping Alfresco... - - - - - - - - - - - - - \ No newline at end of file + + + install-share-amp + + install + + process-test-resources + + true + ${project.build.directory}/amps/alfresco-rm-share-${project.version}-amp.amp + ${project.build.directory}/alf-installation/tomcat/webapps/share.war + + + + + + + + + run-alfresco + + + + org.jacoco + jacoco-maven-plugin + 0.7.5.201505241946 + + + prepare-jacoco + + prepare-agent + + + + + + org.alfresco.* + + + + + maven-antrun-plugin + + + start-alfresco + process-test-classes + + run + + + + Starting Alfresco... + + + + + + + + + + stop-alfresco + post-integration-test + + run + + + + Stopping Alfresco... 
+ + + + + + + + + + + + + diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties b/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties index 8116b69d78..b7758de742 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/alfresco-global.properties @@ -72,6 +72,12 @@ rm.completerecord.mandatorypropertiescheck.enabled=true # rm.patch.v22.convertToStandardFilePlan=false +# Permission mapping +# these take a comma separated string of permissions from org.alfresco.service.cmr.security.PermissionService +# read maps to ReadRecords and write to FileRecords +rm.haspermissionmap.read=ReadProperties,ReadChildren +rm.haspermissionmap.write=WriteProperties,AddChildren + # # Extended auto-version behaviour. If true and other auto-version properties are satisfied, then # a document will be auto-versioned when its type is changed. diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/extended-repository-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/extended-repository-context.xml index 3882036382..3d1e3db821 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/extended-repository-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/extended-repository-context.xml @@ -138,6 +138,12 @@ + + ${rm.haspermissionmap.read} + + + ${rm.haspermissionmap.write} + diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/model/recordsModel.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/model/recordsModel.xml index 6ade285156..8cbdbed4ff 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/model/recordsModel.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/model/recordsModel.xml @@ -392,6 +392,11 @@ d:date false + + Manually Set Disposition Date Flag + d:boolean + false + Disposition Events Eligible d:boolean diff --git a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml index 271b40b23c..97f6a97480 100644 --- a/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml +++ b/rm-server/config/alfresco/module/org_alfresco_module_rm/rm-service-context.xml @@ -39,12 +39,6 @@ - - - - - @@ -96,9 +90,6 @@ - - - @@ -139,6 +130,7 @@ org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.registerDispositionProperty=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionProperties=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionSchedule=RM.Read.0 + org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getOriginDispositionSchedule=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedDispositionSchedule=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedRecordsManagementContainer=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.hasDisposableItems=RM_ALLOW diff --git a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml index 6cf7174dff..c84c7fb643 100644 --- 
a/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml +++ b/rm-server/config/alfresco/templates/webscripts/org/alfresco/repository/roles/rm-dynamicauthorities.get.desc.xml @@ -5,10 +5,10 @@ URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.
URL parameter export is optional, and if its value is true, the processed records will be exported to a CSV file.
- URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
+ URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.
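An illustrative request combining these parameters might look like the following (the host, port and /alfresco/service prefix are assumptions about a typical installation, and the node id is a placeholder; none of this is part of the change itself):
http://localhost:8080/alfresco/service/api/rm/rm-dynamicauthorities?batchsize=500&maxProcessedRecords=10000&export=true&parentNodeRef=workspace://SpacesStore/{folder-node-id}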
]]> - /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} + /api/rm/rm-dynamicauthorities?batchsize={batchsize}&maxProcessedRecords={maxProcessedRecords?}&export={export?}&parentNodeRef={parentNodeRef?} argument admin required diff --git a/rm-server/pom.xml b/rm-server/pom.xml index a4554801db..8803399995 100644 --- a/rm-server/pom.xml +++ b/rm-server/pom.xml @@ -5,7 +5,7 @@ org.alfresco alfresco-rm-parent - 2.3.1-SNAPSHOT + 2.3.2-SNAPSHOT 4.0.0 alfresco-rm-server @@ -19,7 +19,13 @@ config - true + true + + **/module.properties + + + + config @@ -225,7 +231,6 @@ org.mockito mockito-all - 1.9.5 test diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java index cf4ff4cb8d..3781be92f0 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/BroadcastDispositionActionDefinitionUpdateAction.java @@ -18,6 +18,8 @@ */ package org.alfresco.module.org_alfresco_module_rm.action.impl; +import static org.apache.commons.lang3.BooleanUtils.isNotTrue; + import java.io.Serializable; import java.util.ArrayList; import java.util.Date; @@ -185,7 +187,8 @@ public class BroadcastDispositionActionDefinitionUpdateAction extends RMActionEx { // the change does effect the nextAction for this node // so go ahead and determine what needs updating - if (changedProps.contains(PROP_DISPOSITION_PERIOD) || changedProps.contains(PROP_DISPOSITION_PERIOD_PROPERTY)) + if ((changedProps.contains(PROP_DISPOSITION_PERIOD) || changedProps.contains(PROP_DISPOSITION_PERIOD_PROPERTY)) + && isNotTrue((Boolean) getNodeService().getProperty(nextAction.getNodeRef(), PROP_MANUALLY_SET_AS_OF))) { persistPeriodChanges(dispositionActionDef, nextAction); } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/EditDispositionActionAsOfDateAction.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/EditDispositionActionAsOfDateAction.java index 59976aa365..90f0987eae 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/EditDispositionActionAsOfDateAction.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/action/impl/EditDispositionActionAsOfDateAction.java @@ -38,6 +38,9 @@ public class EditDispositionActionAsOfDateAction extends RMActionExecuterAbstrac private static final String MSG_VALID_DATE_DISP_ASOF = "rm.action.valid-date-disp-asof"; private static final String MSG_DISP_ASOF_LIFECYCLE_APPLIED = "rm.action.disp-asof-lifecycle-applied"; + /** Action name */ + public static final String NAME = "editDispositionActionAsOfDate"; + /** Action parameters */ public static final String PARAM_AS_OF_DATE = "asOfDate"; @@ -62,6 +65,7 @@ public class EditDispositionActionAsOfDateAction extends RMActionExecuterAbstrac if (da != null) { getNodeService().setProperty(da.getNodeRef(), PROP_DISPOSITION_AS_OF, asOfDate); + getNodeService().setProperty(da.getNodeRef(), PROP_MANUALLY_SET_AS_OF, true); } } else diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionSelectionStrategy.java 
b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionSelectionStrategy.java deleted file mode 100644 index d341c71295..0000000000 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionSelectionStrategy.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (C) 2005-2014 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.module.org_alfresco_module_rm.disposition; - -import java.util.Comparator; -import java.util.Date; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; -import org.alfresco.service.cmr.repository.NodeRef; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -/** - * This class offers the default implementation of a strategy for selection of - * disposition schedule for a record when there is more than one which is applicable. - * An example of where this strategy might be used would be in the case of a record - * which was multiply filed. - * - * @author neilm - */ -public class DispositionSelectionStrategy implements RecordsManagementModel -{ - /** Logger */ - private static Log logger = LogFactory.getLog(DispositionSelectionStrategy.class); - - /** Disposition service */ - private DispositionService dispositionService; - - /** - * Set the disposition service - * - * @param dispositionService disposition service - */ - public void setDispositionService(DispositionService dispositionService) - { - this.dispositionService = dispositionService; - } - - /** - * Select the disposition schedule to use given there is more than one - * - * @param recordFolders - * @return - */ - public NodeRef selectDispositionScheduleFrom(List recordFolders) - { - if (recordFolders == null || recordFolders.isEmpty()) - { - return null; - } - else - { - // 46 CHAPTER 2 - // Records assigned more than 1 disposition must be retained and linked to the record folder (category) with the longest - // retention period. - - // Assumption: an event-based disposition action has a longer retention - // period than a time-based one - as we cannot know when an event will occur - // TODO Automatic events? 
- - NodeRef recordFolder = null; - if (recordFolders.size() == 1) - { - recordFolder = recordFolders.get(0); - } - else - { - SortedSet sortedFolders = new TreeSet(new DispositionableNodeRefComparator()); - sortedFolders.addAll(recordFolders); - recordFolder = sortedFolders.first(); - } - - DispositionSchedule dispSchedule = dispositionService.getDispositionSchedule(recordFolder); - - if (logger.isDebugEnabled()) - { - logger.debug("Selected disposition schedule: " + dispSchedule); - } - - NodeRef result = null; - if (dispSchedule != null) - { - result = dispSchedule.getNodeRef(); - } - return result; - } - } - - /** - * This class defines a natural comparison order between NodeRefs that have - * the dispositionLifecycle aspect applied. - * This order has the following meaning: NodeRefs with a 'lesser' value are considered - * to have a shorter retention period, although the actual retention period may - * not be straightforwardly determined in all cases. - */ - class DispositionableNodeRefComparator implements Comparator - { - public int compare(final NodeRef f1, final NodeRef f2) - { - // Run as admin user - return AuthenticationUtil.runAs(new RunAsWork() - { - public Integer doWork() - { - return compareImpl(f1, f2); - } - }, AuthenticationUtil.getAdminUserName()); - } - - private int compareImpl(NodeRef f1, NodeRef f2) - { - // quick check to see if the node references are the same - if (f1.equals(f2)) - { - return 0; - } - - // get the disposition schedules for the folders - DispositionSchedule ds1 = dispositionService.getDispositionSchedule(f1); - DispositionSchedule ds2 = dispositionService.getDispositionSchedule(f2); - - // make sure each folder has a disposition schedule - if (ds1 == null && ds2 != null) - { - return 1; - } - else if (ds1 != null && ds2 == null) - { - return -1; - } - else if (ds1 == null && ds2 == null) - { - return 0; - } - - // TODO this won't work correctly if we are trying to compare schedules that are record based!! - DispositionAction da1 = dispositionService.getNextDispositionAction(f1); - DispositionAction da2 = dispositionService.getNextDispositionAction(f2); - - if (da1 != null && da2 != null) - { - Date asOfDate1 = da1.getAsOfDate(); - Date asOfDate2 = da2.getAsOfDate(); - // If both record(Folder)s have asOfDates, then use these to compare - if (asOfDate1 != null && asOfDate2 != null) - { - return asOfDate1.compareTo(asOfDate2); - } - // If one has a date and the other doesn't, the one with the date is "less". - // (Defined date is 'shorter' than undefined date as an undefined date means it may be retained forever - theoretically) - else if (asOfDate1 != null || asOfDate2 != null) - { - return asOfDate1 == null ? +1 : -1; - } - else - { - // Neither has an asOfDate. (Somewhat arbitrarily) we'll use the number of events to compare now. 
- DispositionActionDefinition dad1 = da1.getDispositionActionDefinition(); - DispositionActionDefinition dad2 = da2.getDispositionActionDefinition(); - int eventsCount1 = 0; - int eventsCount2 = 0; - - if (dad1 != null) - { - eventsCount1 = dad1.getEvents().size(); - } - if (dad2 != null) - { - eventsCount2 = dad2.getEvents().size(); - } - return Integer.valueOf(eventsCount1).compareTo(eventsCount2); - } - } - - return 0; - } - } -} \ No newline at end of file diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java index 734cb286ec..ed2ed79ecd 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionService.java @@ -233,6 +233,17 @@ public interface DispositionService * @param nodeRef node reference */ void refreshDispositionAction(NodeRef nodeRef); + + /** + * Gets date of the disposition action for the given + * disposition schedule with the given action name + * + * @param record + * @param dispositionSchedule nodeRef + * @param dispositionActionName + * @return date + */ + Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName); /** * Compute the "disposition as of" date (if necessary) for a disposition action and a node. @@ -244,4 +255,13 @@ public interface DispositionService */ Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf); + + /** + * Gets the origin disposition schedule for the record, not the calculated one + * in case of multiple dispositions applied to record + * + * @param nodeRef record + * @return the initial disposition + */ + DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef); } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java index 2f44f18bbe..3a6e1d5beb 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/DispositionServiceImpl.java @@ -18,6 +18,8 @@ */ package org.alfresco.module.org_alfresco_module_rm.disposition; +import static org.apache.commons.lang3.BooleanUtils.isNotTrue; + import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; @@ -27,6 +29,7 @@ import java.util.List; import java.util.Map; import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry; import org.alfresco.module.org_alfresco_module_rm.disposition.property.DispositionProperty; @@ -38,6 +41,7 @@ import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.record.RecordService; import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService; import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl; +import org.alfresco.repo.dictionary.types.period.Immediately; import org.alfresco.repo.policy.BehaviourFilter; import 
org.alfresco.repo.policy.annotation.Behaviour; import org.alfresco.repo.policy.annotation.BehaviourBean; @@ -70,15 +74,26 @@ public class DispositionServiceImpl extends ServiceBaseImpl /** Logger */ private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class); + /** Transaction mode for setting next action */ + public enum WriteMode + { + /** Do not update any data. */ + READ_ONLY, + /** Only set the "disposition as of" date. */ + DATE_ONLY, + /** + * Set the "disposition as of" date and the name of the next action. This only happens during the creation of a + * disposition schedule impl node under a record or folder. + */ + DATE_AND_NAME + }; + /** Behaviour filter */ private BehaviourFilter behaviourFilter; /** Records management service registry */ private RecordsManagementServiceRegistry serviceRegistry; - /** Disposition selection strategy */ - private DispositionSelectionStrategy dispositionSelectionStrategy; - /** File plan service */ private FilePlanService filePlanService; @@ -168,16 +183,6 @@ public class DispositionServiceImpl extends ServiceBaseImpl this.freezeService = freezeService; } - /** - * Set the dispositionSelectionStrategy bean. - * - * @param dispositionSelectionStrategy - */ - public void setDispositionSelectionStrategy(DispositionSelectionStrategy dispositionSelectionStrategy) - { - this.dispositionSelectionStrategy = dispositionSelectionStrategy; - } - /** * Behavior to initialize the disposition schedule of a newly filed record. * @@ -267,31 +272,76 @@ public class DispositionServiceImpl extends ServiceBaseImpl * @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef) */ @Override - public DispositionSchedule getDispositionSchedule(NodeRef nodeRef) + public DispositionSchedule getDispositionSchedule(final NodeRef nodeRef) { - DispositionSchedule di = null; - NodeRef diNodeRef = null; + DispositionSchedule ds = null; + NodeRef dsNodeRef = null; if (isRecord(nodeRef)) { - // Get the record folders for the record - List recordFolders = recordFolderService.getRecordFolders(nodeRef); - // At this point, we may have disposition instruction objects from 1..n folders. 
- diNodeRef = dispositionSelectionStrategy.selectDispositionScheduleFrom(recordFolders); + // calculate disposition schedule without taking into account the user + DispositionSchedule originDispositionSchedule = AuthenticationUtil.runAsSystem(new RunAsWork() + { + @Override + public DispositionSchedule doWork() + { + return getOriginDispositionSchedule(nodeRef); + } + }); + // if the initial disposition schedule of the record is folder based + if (originDispositionSchedule == null || + isNotTrue(originDispositionSchedule.isRecordLevelDisposition())) + { + return null; + } + + final NextActionFromDisposition dsNextAction = getDispositionActionByNameForRecord(nodeRef); + + if (dsNextAction != null) + { + final NodeRef action = dsNextAction.getNextActionNodeRef(); + if (isNotTrue((Boolean)nodeService.getProperty(action, PROP_MANUALLY_SET_AS_OF))) + { + if (!dsNextAction.getWriteMode().equals(WriteMode.READ_ONLY)) + { + final String dispositionActionName = dsNextAction.getNextActionName(); + final Date dispositionActionDate = dsNextAction.getNextActionDateAsOf(); + + AuthenticationUtil.runAsSystem(new RunAsWork() + { + @Override + public Void doWork() + { + nodeService.setProperty(action, PROP_DISPOSITION_AS_OF, dispositionActionDate); + if (dsNextAction.getWriteMode().equals(WriteMode.DATE_AND_NAME)) + { + nodeService.setProperty(action, PROP_DISPOSITION_ACTION_NAME, dispositionActionName); + } + return null; + } + }); + } + } + + dsNodeRef = dsNextAction.getDispositionNodeRef(); + } } else { // Get the disposition instructions for the node reference provided - diNodeRef = getDispositionScheduleImpl(nodeRef); + dsNodeRef = getDispositionScheduleImpl(nodeRef); } - if (diNodeRef != null) + if (dsNodeRef != null) { - di = new DispositionScheduleImpl(serviceRegistry, nodeService, diNodeRef); + ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dsNodeRef); } - return di; + return ds; } + + + /** * This method returns a NodeRef * Gets the disposition instructions @@ -313,6 +363,28 @@ public class DispositionServiceImpl extends ServiceBaseImpl } return result; } + + public DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef) + { + NodeRef parent = this.nodeService.getPrimaryParent(nodeRef).getParentRef(); + if (parent != null) + { + if (filePlanService.isRecordCategory(parent)) + { + NodeRef result = getAssociatedDispositionScheduleImpl(parent); + if (result == null) + { + return null; + } + return new DispositionScheduleImpl(serviceRegistry, nodeService, result); + } + else + { + return getOriginDispositionSchedule(parent); + } + } + return null; + } /** * @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getAssociatedDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef) @@ -620,8 +692,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl * @param dispositionActionDefinition disposition action definition * @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property. 
*/ - private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf) + private DispositionAction initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf) { + List childAssocs = nodeService.getChildAssocs(nodeRef, ASSOC_NEXT_DISPOSITION_ACTION, ASSOC_NEXT_DISPOSITION_ACTION, 1, true); + if (childAssocs != null && childAssocs.size() > 0) + { + return new DispositionActionImpl(serviceRegistry, childAssocs.get(0).getChildRef()); + } + // Create the properties Map props = new HashMap(10); @@ -651,6 +729,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl // For every event create an entry on the action da.addEventCompletionDetails(event); } + return da; } /** @@ -683,9 +762,16 @@ public class DispositionServiceImpl extends ServiceBaseImpl } else { - // for now use 'NOW' as the default context date - // TODO set the default period property ... cut off date or last disposition date depending on context - contextDate = new Date(); + if (period.getPeriodType().equals(Immediately.PERIOD_TYPE)) + { + contextDate = (Date)nodeService.getProperty(nodeRef, ContentModel.PROP_CREATED); + } + else + { + // for now use 'NOW' as the default context date + // TODO set the default period property ... cut off date or last disposition date depending on context + contextDate = new Date(); + } } // Calculate the as of date @@ -897,6 +983,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl String currentADId = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION_ID); currentDispositionActionDefinition = di.getDispositionActionDefinition(currentADId); + // When the record has multiple disposition schedules the current disposition action may not be found by id + // In this case it will be searched by name + if(currentDispositionActionDefinition == null) + { + String currentADName = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION); + currentDispositionActionDefinition = di.getDispositionActionDefinitionByName(currentADName); + } + // Get the next disposition action int index = currentDispositionActionDefinition.getIndex(); index++; @@ -983,6 +1077,24 @@ public class DispositionServiceImpl extends ServiceBaseImpl } } + public Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName) + { + DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dispositionSchedule); + List assocs = nodeService.getChildAssocs(dispositionSchedule); + if (assocs != null && assocs.size() > 0) + { + for (ChildAssociationRef assoc : assocs) + { + if (assoc != null && assoc.getQName().getLocalName().contains(dispositionActionName)) + { + DispositionActionDefinition actionDefinition = ds.getDispositionActionDefinition(assoc.getChildRef().getId()); + return calculateAsOfDate(record, actionDefinition, true); + } + } + } + return null; + } + /** * Helper method to determine if a node is frozen or has frozen children * @@ -1030,4 +1142,169 @@ public class DispositionServiceImpl extends ServiceBaseImpl } }); } + + + + /** + * Calculate next disposition action for a record + * + * @param record + * @return next disposition action (name, date) and the disposition associated + */ + + protected NextActionFromDisposition getDispositionActionByNameForRecord(NodeRef record) + { + List recordFolders = 
recordFolderService.getRecordFolders(record); + DispositionAction nextDispositionAction = getNextDispositionAction(record); + + if (nextDispositionAction == null) + { + DispositionAction lastCompletedDispositionAction = getLastCompletedDispostionAction(record); + if (lastCompletedDispositionAction != null) + { + // all disposition actions upon the given record were completed + return null; + } + + return getFirstDispositionAction(record, recordFolders); + } + else + { + return getNextDispositionAction(record, recordFolders, nextDispositionAction); + } + } + + /** + * Calculate next disposition action when the record already has one + * @param recordFolders + * @param nextDispositionAction + * @return next disposition action and the associated disposition schedule + */ + private NextActionFromDisposition getNextDispositionAction(NodeRef record, List recordFolders, DispositionAction nextDispositionAction) + { + String recordNextDispositionActionName = nextDispositionAction.getName(); + Date recordNextDispositionActionDate = nextDispositionAction.getAsOfDate(); + // We're looking for the latest date, so initially start with a very early one. + Date nextDispositionActionDate = new Date(Long.MIN_VALUE); + NodeRef dispositionNodeRef = null; + + // Find the latest "disposition as of" date from all the schedules this record is subject to. + for (NodeRef folder : recordFolders) + { + NodeRef dsNodeRef = getDispositionScheduleImpl(folder); + if (dsNodeRef != null) + { + Date dispActionDate = getDispositionActionDate(record, dsNodeRef, recordNextDispositionActionName); + if (dispActionDate == null || (nextDispositionActionDate != null + && nextDispositionActionDate.before(dispActionDate))) + { + nextDispositionActionDate = dispActionDate; + dispositionNodeRef = dsNodeRef; + if (dispActionDate == null) + { + // Treat null as the latest date possible (so stop searching further). + break; + } + } + } + } + if (dispositionNodeRef == null) + { + return null; + } + WriteMode mode = determineWriteMode(recordNextDispositionActionDate, nextDispositionActionDate); + + return new NextActionFromDisposition(dispositionNodeRef, nextDispositionAction.getNodeRef(), + recordNextDispositionActionName, nextDispositionActionDate, mode); + } + + /** + * Determine what should be updated for an existing disposition schedule impl. We only update the date if the + * existing date is earlier than the calculated one. + * + * @param recordNextDispositionActionDate The next action date found on the record node (or folder node). + * @param nextDispositionActionDate The next action date calculated from the current disposition schedule(s) + * affecting the node. + * @return READ_ONLY if nothing should be updated, or DATE_ONLY if the date needs updating. + */ + private WriteMode determineWriteMode(Date recordNextDispositionActionDate, Date nextDispositionActionDate) + { + // Treat null dates as being the latest possible date. + Date maxDate = new Date(Long.MAX_VALUE); + Date recordDate = (recordNextDispositionActionDate != null ? recordNextDispositionActionDate : maxDate); + Date calculatedDate = (nextDispositionActionDate != null ? nextDispositionActionDate : maxDate); + + // We only need to update the date if the current one is too early. 
+ if (recordDate.before(calculatedDate)) + { + return WriteMode.DATE_ONLY; + } + else + { + return WriteMode.READ_ONLY; + } + } + + /** + * Calculate first disposition action when the record doesn't have one + * @param recordFolders + * @return next disposition action and the associated disposition schedule + */ + private NextActionFromDisposition getFirstDispositionAction(NodeRef record, List recordFolders) + { + NodeRef newAction = null; + String newDispositionActionName = null; + // We're looking for the latest date, so start with a very early one. + Date newDispositionActionDateAsOf = new Date(Long.MIN_VALUE); + NodeRef dispositionNodeRef = null; + for (NodeRef folder : recordFolders) + { + NodeRef folderDS = getDispositionScheduleImpl(folder); + if (folderDS != null) + { + DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, folderDS); + List dispositionActionDefinitions = ds.getDispositionActionDefinitions(); + + if (dispositionActionDefinitions != null && dispositionActionDefinitions.size() > 0) + { + DispositionActionDefinition firstDispositionActionDef = dispositionActionDefinitions.get(0); + dispositionNodeRef = folderDS; + + if (newAction == null) + { + NodeRef recordOrFolder = record; + if (!ds.isRecordLevelDisposition()) + { + recordOrFolder = folder; + } + DispositionAction firstDispositionAction = initialiseDispositionAction(recordOrFolder, firstDispositionActionDef, true); + newAction = firstDispositionAction.getNodeRef(); + newDispositionActionName = (String)nodeService.getProperty(newAction, PROP_DISPOSITION_ACTION_NAME); + newDispositionActionDateAsOf = firstDispositionAction.getAsOfDate(); + } + else if (firstDispositionActionDef.getPeriod() != null) + { + Date firstActionDate = calculateAsOfDate(record, firstDispositionActionDef, true); + if (firstActionDate == null || (newDispositionActionDateAsOf != null + && newDispositionActionDateAsOf.before(firstActionDate))) + { + newDispositionActionName = firstDispositionActionDef.getName(); + newDispositionActionDateAsOf = firstActionDate; + if (firstActionDate == null) + { + // Treat null as the latest date possible, so there's no point searching further. 
+ break; + } + } + } + } + } + } + if (newDispositionActionName == null || dispositionNodeRef == null || newAction == null) + { + return null; + } + return new NextActionFromDisposition(dispositionNodeRef, newAction, + newDispositionActionName, newDispositionActionDateAsOf, WriteMode.DATE_AND_NAME); + } } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/NextActionFromDisposition.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/NextActionFromDisposition.java new file mode 100644 index 0000000000..377b0a23fd --- /dev/null +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/NextActionFromDisposition.java @@ -0,0 +1,80 @@ +package org.alfresco.module.org_alfresco_module_rm.disposition; + +import java.util.Date; + +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionServiceImpl.WriteMode; +import org.alfresco.service.cmr.repository.NodeRef; + +public class NextActionFromDisposition +{ + public NextActionFromDisposition(NodeRef dispositionNodeRef, NodeRef nextActionNodeRef, String nextActionName, Date nextActionDateAsOf, + WriteMode writeMode) + { + super(); + this.dispositionNodeRef = dispositionNodeRef; + this.nextActionNodeRef = nextActionNodeRef; + this.nextActionName = nextActionName; + this.nextActionDateAsOf = nextActionDateAsOf; + this.writeMode = writeMode; + } + + private NodeRef dispositionNodeRef; + + private NodeRef nextActionNodeRef; + + private String nextActionName; + + private Date nextActionDateAsOf; + + private WriteMode writeMode; + + public WriteMode getWriteMode() + { + return writeMode; + } + + public void setWriteMode(WriteMode writeMode) + { + this.writeMode = writeMode; + } + + public NodeRef getNextActionNodeRef() + { + return nextActionNodeRef; + } + + public void setNextActionNodeRef(NodeRef nextActionNodeRef) + { + this.nextActionNodeRef = nextActionNodeRef; + } + + public NodeRef getDispositionNodeRef() + { + return dispositionNodeRef; + } + + public void setDispositionNodeRef(NodeRef dispositionNodeRef) + { + this.dispositionNodeRef = dispositionNodeRef; + } + + public String getNextActionName() + { + return nextActionName; + } + + public void setNextActionName(String nextActionName) + { + this.nextActionName = nextActionName; + } + + public Date getNextActionDateAsOf() + { + return nextActionDateAsOf; + } + + public void setNextActionDateAsOf(Date nextActionDateAsOf) + { + this.nextActionDateAsOf = nextActionDateAsOf; + } +} diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/property/DispositionProperty.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/property/DispositionProperty.java index 642efc6265..02ef7aabff 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/property/DispositionProperty.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/disposition/property/DispositionProperty.java @@ -18,6 +18,8 @@ */ package org.alfresco.module.org_alfresco_module_rm.disposition.property; +import static org.apache.commons.lang3.BooleanUtils.isNotTrue; + import java.io.Serializable; import java.util.Date; import java.util.Map; @@ -211,7 +213,11 @@ public class DispositionProperty extends BaseBehaviourBean // update asOf date on the disposition action based on the new property value NodeRef daNodeRef = dispositionAction.getNodeRef(); - nodeService.setProperty(daNodeRef, PROP_DISPOSITION_AS_OF, updatedAsOf); + // 
Don't overwrite a manually set "disposition as of" date. + if (isNotTrue((Boolean) nodeService.getProperty(daNodeRef, PROP_MANUALLY_SET_AS_OF))) + { + nodeService.setProperty(daNodeRef, PROP_DISPOSITION_AS_OF, updatedAsOf); + } } } } diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/model/RecordsManagementModel.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/model/RecordsManagementModel.java index 406282d648..d57cc2f818 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/model/RecordsManagementModel.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/model/RecordsManagementModel.java @@ -149,6 +149,8 @@ public interface RecordsManagementModel extends RecordsManagementCustomModel QName PROP_DISPOSITION_ACTION_ID = QName.createQName(RM_URI, "dispositionActionId"); QName PROP_DISPOSITION_ACTION = QName.createQName(RM_URI, "dispositionAction"); QName PROP_DISPOSITION_AS_OF = QName.createQName(RM_URI, "dispositionAsOf"); + /** A flag indicating that the "disposition as of" date has been manually set and shouldn't be changed. */ + QName PROP_MANUALLY_SET_AS_OF = QName.createQName(RM_URI, "manuallySetAsOf"); QName PROP_DISPOSITION_EVENTS_ELIGIBLE = QName.createQName(RM_URI, "dispositionEventsEligible"); QName PROP_DISPOSITION_ACTION_STARTED_AT = QName.createQName(RM_URI, "dispositionActionStartedAt"); QName PROP_DISPOSITION_ACTION_STARTED_BY = QName.createQName(RM_URI, "dispositionActionStartedBy"); diff --git a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java index f4fa00e1d0..a523ac93b6 100644 --- a/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java +++ b/rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImpl.java @@ -34,12 +34,14 @@ import java.util.Set; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.BeforeFileRecord; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.OnFileRecord; import org.alfresco.module.org_alfresco_module_rm.capability.Capability; import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService; import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionScheduleImpl; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService; import org.alfresco.module.org_alfresco_module_rm.dod5015.DOD5015Model; import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService; @@ -228,7 +230,7 @@ public class RecordServiceImpl extends BaseBehaviourBean /** recordable version service */ private RecordableVersionService recordableVersionService; - + /** list of available record meta-data aspects and the file plan types the are applicable to */ private Map> recordMetaDataAspects; @@ -385,7 +387,7 @@ public class RecordServiceImpl extends BaseBehaviourBean { this.recordableVersionService = recordableVersionService; } - + /** * Init method */ @@ -1732,7 +1734,10 @@ public class RecordServiceImpl extends BaseBehaviourBean 
private void validateLinkConditions(NodeRef record, NodeRef recordFolder) { // ensure that the linking record folders have compatible disposition schedules - DispositionSchedule recordDispositionSchedule = dispositionService.getDispositionSchedule(record); + + // get the origin disposition schedule for the record, not the calculated one + DispositionSchedule recordDispositionSchedule = dispositionService.getOriginDispositionSchedule(record); + if (recordDispositionSchedule != null) { DispositionSchedule recordFolderDispositionSchedule = dispositionService.getDispositionSchedule(recordFolder); diff --git a/rm-server/source/java/org/alfresco/repo/security/permissions/impl/RMPermissionServiceImpl.java b/rm-server/source/java/org/alfresco/repo/security/permissions/impl/RMPermissionServiceImpl.java index b859425123..4aa903c1c5 100644 --- a/rm-server/source/java/org/alfresco/repo/security/permissions/impl/RMPermissionServiceImpl.java +++ b/rm-server/source/java/org/alfresco/repo/security/permissions/impl/RMPermissionServiceImpl.java @@ -21,8 +21,10 @@ package org.alfresco.repo.security.permissions.impl; import static org.apache.commons.lang.StringUtils.isNotBlank; import java.io.Serializable; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel; @@ -30,6 +32,7 @@ import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService; import org.alfresco.repo.cache.SimpleCache; + import org.alfresco.repo.security.permissions.AccessControlEntry; import org.alfresco.repo.security.permissions.AccessControlList; import org.alfresco.service.cmr.repository.NodeRef; @@ -42,6 +45,7 @@ import org.alfresco.util.PropertyCheck; import org.apache.commons.lang.StringUtils; import org.springframework.context.ApplicationEvent; + /** * Extends the core permission service implementation allowing the consideration of the read records * permission. @@ -56,6 +60,16 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl /** Writers simple cache */ protected SimpleCache> writersCache; + /** + * Configured Permission mapping. + * + * These strings come from alfresco-global.properties and allow fine tuning of the how permissions are mapped. + * This was added as a fix for MNT-16852 to enhance compatibility with our Outlook Integration. 
+ * + **/ + protected List configuredReadPermissions; + protected List configuredFilePermissions; + /** File plan service */ private FilePlanService filePlanService; @@ -97,6 +111,28 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl this.writersCache = writersCache; } + /** + * Maps the string from the properties file (rm.haspermissionmap.read) + * to the list used in the hasPermission method + * + * @param readMapping the mapping of permissions to ReadRecord + */ + public void setConfiguredReadPermissions(String readMapping) + { + this.configuredReadPermissions = Arrays.asList(readMapping.split(",")); + } + + /** + * Maps the string set in the properties file (rm.haspermissionmap.write) + * to the list used in the hasPermission method + * + * @param fileMapping the mapping of permissions to FileRecord + */ + public void setConfiguredFilePermissions(String fileMapping) + { + this.configuredFilePermissions = Arrays.asList(fileMapping.split(",")); + } + /** * @see org.alfresco.repo.security.permissions.impl.PermissionServiceImpl#onBootstrap(org.springframework.context.ApplicationEvent) */ @@ -118,18 +154,18 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl public AccessStatus hasPermission(NodeRef nodeRef, String perm) { AccessStatus acs = super.hasPermission(nodeRef, perm); + if (AccessStatus.DENIED.equals(acs) && - PermissionService.READ.equals(perm) && nodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT)) { - return super.hasPermission(nodeRef, RMPermissionModel.READ_RECORDS); - } - // Added ADD_CHILDREN check in for MNT-16852. - else if (AccessStatus.DENIED.equals(acs) && - (PermissionService.WRITE.equals(perm) || PermissionService.ADD_CHILDREN.equals(perm)) && - nodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT)) - { - return super.hasPermission(nodeRef, RMPermissionModel.FILE_RECORDS); + if (PermissionService.READ.equals(perm) || this.configuredReadPermissions.contains(perm)) + { + return super.hasPermission(nodeRef, RMPermissionModel.READ_RECORDS); + } + else if (PermissionService.WRITE.equals(perm) || this.configuredFilePermissions.contains(perm)) + { + return super.hasPermission(nodeRef, RMPermissionModel.FILE_RECORDS); + } } return acs; diff --git a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java index 97f22265a3..f4df1ec148 100644 --- a/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java +++ b/rm-server/source/java/org/alfresco/repo/web/scripts/roles/DynamicAuthoritiesGet.java @@ -18,22 +18,22 @@ */ package org.alfresco.repo.web.scripts.roles; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.io.Writer; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.Set; + +import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpServletResponse; - import org.alfresco.model.ContentModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import 
org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; @@ -43,26 +43,26 @@ import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.repo.web.scripts.content.ContentStreamer; -import org.alfresco.service.cmr.model.FileFolderService; -import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.repo.web.scripts.content.ContentStreamer; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.Pair; -import org.alfresco.util.TempFileProvider; +import org.alfresco.util.TempFileProvider; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.extensions.webscripts.AbstractWebScript; +import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.Cache; -import org.springframework.extensions.webscripts.Format; +import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Status; -import org.springframework.extensions.webscripts.WebScriptException; +import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptRequest; -import org.springframework.extensions.webscripts.WebScriptResponse; +import org.springframework.extensions.webscripts.WebScriptResponse; /** * Webscript used for removing dynamic authorities from the records. 
@@ -71,7 +71,7 @@ import org.springframework.extensions.webscripts.WebScriptResponse; * @since 2.3.0.7 */ @SuppressWarnings("deprecation") -public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel +public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel { private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; @@ -80,7 +80,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; - private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; + private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String SUCCESS_STATUS = "success"; /** * The logger @@ -88,8 +88,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static final String BATCH_SIZE = "batchsize"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; - private static final String PARAM_EXPORT = "export"; - private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; + private static final String PARAM_EXPORT = "export"; + private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_MESSAGE = "message"; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; @@ -105,60 +105,60 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM private PermissionService permissionService; private ExtendedSecurityService extendedSecurityService; private TransactionService transactionService; - /** Content Streamer */ - protected ContentStreamer contentStreamer; - private FileFolderService fileFolderService; - - /** service setters */ - public void setPatchDAO(PatchDAO patchDAO) - { - this.patchDAO = patchDAO; - } + /** Content Streamer */ + protected ContentStreamer contentStreamer; + private FileFolderService fileFolderService; - public void setNodeDAO(NodeDAO nodeDAO) - { - this.nodeDAO = nodeDAO; - } + /** service setters */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } - public void setQnameDAO(QNameDAO qnameDAO) + public void setNodeDAO(NodeDAO nodeDAO) { - this.qnameDAO = qnameDAO; - } + this.nodeDAO = nodeDAO; + } - public void setNodeService(NodeService nodeService) + public void setQnameDAO(QNameDAO qnameDAO) { - this.nodeService = nodeService; + this.qnameDAO = qnameDAO; } - - public void setPermissionService(PermissionService permissionService) + + public void setNodeService(NodeService nodeService) { - this.permissionService = permissionService; - } - - public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) + this.nodeService = nodeService; + } + + public void setPermissionService(PermissionService permissionService) { - this.extendedSecurityService = extendedSecurityService; + 
this.permissionService = permissionService; } - - public void setTransactionService(TransactionService transactionService) - { - this.transactionService = transactionService; - } - - public void setContentStreamer(ContentStreamer contentStreamer) + + public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) { - this.contentStreamer = contentStreamer; + this.extendedSecurityService = extendedSecurityService; } - - public void setFileFolderService(FileFolderService fileFolderService) - { - this.fileFolderService = fileFolderService; - } - - protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException - { - Map model = new HashMap(); - final Long batchSize = getBatchSizeParameter(req); + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setContentStreamer(ContentStreamer contentStreamer) + { + this.contentStreamer = contentStreamer; + } + + public void setFileFolderService(FileFolderService fileFolderService) + { + this.fileFolderService = fileFolderService; + } + + protected Map buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException + { + Map model = new HashMap(); + final Long batchSize = getBatchSizeParameter(req); // get the max node id and the extended security aspect Long maxNodeId = patchDAO.getMaxAdmNodeID(); final Pair recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); @@ -170,201 +170,201 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM return model; } - Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); - - boolean attach = getExportParameter(req); - - File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); - FileWriter writer = new FileWriter(file); - BufferedWriter out = new BufferedWriter(writer); - List processedNodes = new ArrayList(); - try - { - NodeRef parentNodeRef = getParentNodeRefParameter(req); - if (parentNodeRef != null) - { - processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, - totalNumberOfRecordsToProcess.intValue(), out, attach); - } - else - { - processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, - out, attach); - } - } - finally - { - out.close(); - } - - int processedNodesSize = processedNodes.size(); - - String message = ""; - if (totalNumberOfRecordsToProcess == 0 - || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) - { - message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); - } - if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) - { - message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); - } - model.put(MODEL_STATUS, SUCCESS_STATUS); - model.put(MODEL_MESSAGE, message); - logger.info(message); - - if (attach) - { - try - { - String fileName = file.getName(); - contentStreamer.streamContent(req, res, file, null, attach, fileName, model); - model = null; - } - finally - { - if (file != null) - { - file.delete(); - } - } - } - return model; - } - - /** - * Get export parameter from the request - * - * @param req - * @return - */ - protected boolean getExportParameter(WebScriptRequest req) - { - boolean attach = false; - String export = req.getParameter(PARAM_EXPORT); - if (export != null && Boolean.parseBoolean(export)) - { - attach = true; - } - return attach; - } - - 
/* - * (non-Javadoc) - * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. - * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) - */ - @Override - public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException - { - // retrieve requested format - String format = req.getFormat(); - - try - { - String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); + Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); + + boolean attach = getExportParameter(req); + + File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); + FileWriter writer = new FileWriter(file); + BufferedWriter out = new BufferedWriter(writer); + List processedNodes = new ArrayList(); + try + { + NodeRef parentNodeRef = getParentNodeRefParameter(req); + if (parentNodeRef != null) + { + processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, + totalNumberOfRecordsToProcess.intValue(), out, attach); + } + else + { + processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, + out, attach); + } + } + finally + { + out.close(); + } + + int processedNodesSize = processedNodes.size(); + + String message = ""; + if (totalNumberOfRecordsToProcess == 0 + || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) + { + message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); + } + if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) + { + message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); + } + model.put(MODEL_STATUS, SUCCESS_STATUS); + model.put(MODEL_MESSAGE, message); + logger.info(message); + + if (attach) + { + try + { + String fileName = file.getName(); + contentStreamer.streamContent(req, res, file, null, attach, fileName, model); + model = null; + } + finally + { + if (file != null) + { + file.delete(); + } + } + } + return model; + } + + /** + * Get export parameter from the request + * + * @param req + * @return + */ + protected boolean getExportParameter(WebScriptRequest req) + { + boolean attach = false; + String export = req.getParameter(PARAM_EXPORT); + if (export != null && Boolean.parseBoolean(export)) + { + attach = true; + } + return attach; + } + + /* + * (non-Javadoc) + * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. 
+ * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) + */ + @Override + public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException + { + // retrieve requested format + String format = req.getFormat(); + + try + { + String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); if (mimetype == null) { throw new WebScriptException("Web Script format '" + format + "' is not registered"); - } - - // construct model for script / template - Status status = new Status(); - Cache cache = new Cache(getDescription().getRequiredCache()); - Map model = buildModel(req, res); - if (model == null) { return; } - model.put("status", status); - model.put("cache", cache); - - Map templateModel = createTemplateParameters(req, res, model); - - // render output - int statusCode = status.getCode(); - if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) - { - if (logger.isDebugEnabled()) - { - logger.debug("Force success status header in response: " + req.forceSuccessStatus()); - logger.debug("Setting status " + statusCode); - } - res.setStatus(statusCode); - } - - // apply location - String location = status.getLocation(); - if (location != null && location.length() > 0) - { - if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); - res.setHeader(WebScriptResponse.HEADER_LOCATION, location); - } - - // apply cache - res.setCache(cache); - - String callback = null; - if (getContainer().allowCallbacks()) - { - callback = req.getJSONCallback(); - } - if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) - { - if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" - + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); - - // NOTE: special case for wrapping JSON results in a javascript function callback - res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); - res.getWriter().write((callback + "(")); - } - else - { - if (logger.isDebugEnabled()) - logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); - - res.setContentType(mimetype + ";charset=UTF-8"); - } - - // render response according to requested format - renderFormatTemplate(format, templateModel, res.getWriter()); - - if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) - { - // NOTE: special case for wrapping JSON results in a javascript function callback - res.getWriter().write(")"); - } - } - catch (Throwable e) - { - if (logger.isDebugEnabled()) - { - StringWriter stack = new StringWriter(); - e.printStackTrace(new PrintWriter(stack)); - logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); - } - - throw createStatusException(e, req, res); - } - } - - protected void renderFormatTemplate(String format, Map model, Writer writer) - { - format = (format == null) ? "" : format; - - String templatePath = getDescription().getId() + "." 
+ format; - - if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); - - renderTemplate(templatePath, model, writer); - } - - /** - * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise - * - * @param req - * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise - */ - protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) - { - String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); + } + + // construct model for script / template + Status status = new Status(); + Cache cache = new Cache(getDescription().getRequiredCache()); + Map model = buildModel(req, res); + if (model == null) { return; } + model.put("status", status); + model.put("cache", cache); + + Map templateModel = createTemplateParameters(req, res, model); + + // render output + int statusCode = status.getCode(); + if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) + { + if (logger.isDebugEnabled()) + { + logger.debug("Force success status header in response: " + req.forceSuccessStatus()); + logger.debug("Setting status " + statusCode); + } + res.setStatus(statusCode); + } + + // apply location + String location = status.getLocation(); + if (location != null && location.length() > 0) + { + if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); + res.setHeader(WebScriptResponse.HEADER_LOCATION, location); + } + + // apply cache + res.setCache(cache); + + String callback = null; + if (getContainer().allowCallbacks()) + { + callback = req.getJSONCallback(); + } + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" + + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + + // NOTE: special case for wrapping JSON results in a javascript function callback + res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); + res.getWriter().write((callback + "(")); + } + else + { + if (logger.isDebugEnabled()) + logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); + + res.setContentType(mimetype + ";charset=UTF-8"); + } + + // render response according to requested format + renderFormatTemplate(format, templateModel, res.getWriter()); + + if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) + { + // NOTE: special case for wrapping JSON results in a javascript function callback + res.getWriter().write(")"); + } + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); + } + + throw createStatusException(e, req, res); + } + } + + protected void renderFormatTemplate(String format, Map model, Writer writer) + { + format = (format == null) ? "" : format; + + String templatePath = getDescription().getId() + "." 
+ format; + + if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); + + renderTemplate(templatePath, model, writer); + } + + /** + * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise + * + * @param req + * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise + */ + protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) + { + String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); //default total number of records to be processed to batch size value Long totalNumberOfRecordsToProcess = batchSize; if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) @@ -378,77 +378,77 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM //do nothing here, the value will remain 0L in this case } } - return totalNumberOfRecordsToProcess; - } - - /** - * Obtain batchsize parameter from the request. - * - * @param req - * @return batchsize parameter from the request - */ - protected Long getBatchSizeParameter(WebScriptRequest req) - { - String batchSizeStr = req.getParameter(BATCH_SIZE); - Long size = 0L; - if (StringUtils.isBlank(batchSizeStr)) - { - logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); - } - try - { - size = Long.parseLong(batchSizeStr); - if (size <= 0) - { - logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); - } - } - catch (NumberFormatException ex) - { - logger.info(MESSAGE_BATCHSIZE_IS_INVALID); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); - } - return size; - } + return totalNumberOfRecordsToProcess; + } - /** - * Get parentNodeRef parameter from the request - * - * @param req - * @return - */ - protected NodeRef getParentNodeRefParameter(WebScriptRequest req) - { - String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); - NodeRef parentNodeRef = null; - if (StringUtils.isNotBlank(parentNodeRefStr)) - { - parentNodeRef = new NodeRef(parentNodeRefStr); - if(!nodeService.exists(parentNodeRef)) - { - String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); - logger.info(message); - throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); - } - } - return parentNodeRef; - } - - /** - * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess - * parameters - * - * @param batchSize - * @param maxNodeId - * @param recordAspectPair - * @param totalNumberOfRecordsToProcess - * @return the list of processed nodes - */ - protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, - Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) - { + /** + * Obtain batchsize parameter from the request. 
+ * + * @param req + * @return batchsize parameter from the request + */ + protected Long getBatchSizeParameter(WebScriptRequest req) + { + String batchSizeStr = req.getParameter(BATCH_SIZE); + Long size = 0L; + if (StringUtils.isBlank(batchSizeStr)) + { + logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY); + } + try + { + size = Long.parseLong(batchSizeStr); + if (size <= 0) + { + logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO); + } + } + catch (NumberFormatException ex) + { + logger.info(MESSAGE_BATCHSIZE_IS_INVALID); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID); + } + return size; + } + + /** + * Get parentNodeRef parameter from the request + * + * @param req + * @return + */ + protected NodeRef getParentNodeRefParameter(WebScriptRequest req) + { + String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); + NodeRef parentNodeRef = null; + if (StringUtils.isNotBlank(parentNodeRefStr)) + { + parentNodeRef = new NodeRef(parentNodeRefStr); + if(!nodeService.exists(parentNodeRef)) + { + String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); + logger.info(message); + throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); + } + } + return parentNodeRef; + } + + /** + * Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess + * parameters + * + * @param batchSize + * @param maxNodeId + * @param recordAspectPair + * @param totalNumberOfRecordsToProcess + * @return the list of processed nodes + */ + protected List processNodes(final Long batchSize, Long maxNodeId, final Pair recordAspectPair, + Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) + { final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final List processedNodes = new ArrayList(); logger.info(MESSAGE_PROCESSING_BEGIN); @@ -466,8 +466,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM public Void execute() throws Throwable { // get the nodes with the extended security aspect applied - List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, - currentIndex + batchSize); + List nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, + currentIndex + batchSize); // process each one for (Long nodeId : nodeIds) @@ -482,79 +482,79 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM processNode(record); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); processedNodes.add(record); - if (attach) - { - out.write(recordName); - out.write(","); - out.write(record.toString()); - out.write("\n"); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); } - } + } return null; } - }, false, // read only + }, false, // read only true); // requires new } logger.info(MESSAGE_PROCESSING_END); - return processedNodes; + return processedNodes; + } + + protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, + final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, + final boolean attach) + { + final List processedNodes = new ArrayList(); + final List children = 
fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); + logger.info(MESSAGE_PROCESSING_BEGIN); + // by batch size + for (int i = 0; i < children.size(); i += batchSize) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + final int currentIndex = i; + + transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); + // process each one + for (FileInfo node : nodes) + { + if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) + { + break; + } + NodeRef record = node.getNodeRef(); + if (nodeService.hasAspect(record, recordAspectPair.getSecond())) + { + String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); + processNode(record); + logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); + processedNodes.add(record); + if (attach) + { + out.write(recordName); + out.write(","); + out.write(record.toString()); + out.write("\n"); + } + } + } + + return null; + } + }, false, // read only + true); // requires new + } + logger.info(MESSAGE_PROCESSING_END); + return processedNodes; } - protected List processChildrenNodes(NodeRef parentNodeRef, final int batchSize, - final Pair recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out, - final boolean attach) - { - final List processedNodes = new ArrayList(); - final List children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true); - logger.info(MESSAGE_PROCESSING_BEGIN); - // by batch size - for (int i = 0; i < children.size(); i += batchSize) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - final int currentIndex = i; - - transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback() - { - public Void execute() throws Throwable - { - List nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size())); - // process each one - for (FileInfo node : nodes) - { - if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess) - { - break; - } - NodeRef record = node.getNodeRef(); - if (nodeService.hasAspect(record, recordAspectPair.getSecond())) - { - String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName)); - processNode(record); - logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); - processedNodes.add(record); - if (attach) - { - out.write(recordName); - out.write(","); - out.write(record.toString()); - out.write("\n"); - } - } - } - - return null; - } - }, false, // read only - true); // requires new - } - logger.info(MESSAGE_PROCESSING_END); - return processedNodes; - } - /** * Process each node * @@ -575,20 +575,20 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); // if record then ... 
- if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) - { - Set readersKeySet = null; - if (readers != null) + if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) { - readersKeySet = readers.keySet(); - } - Set writersKeySet = null; - if (writers != null) - { - writersKeySet = writers.keySet(); - } + Set readersKeySet = null; + if (readers != null) + { + readersKeySet = readers.keySet(); + } + Set writersKeySet = null; + if (writers != null) + { + writersKeySet = writers.keySet(); + } // re-set extended security via API - extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); + extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); } } } diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java index e1938d1d28..913b961fc1 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/DispositionTestSuite.java @@ -32,7 +32,9 @@ import org.junit.runners.Suite.SuiteClasses; @SuiteClasses( { CutOffTest.class, - UpdateDispositionScheduleTest.class + MultipleSchedulesTest.class, + UpdateDispositionScheduleTest.class, + UpdateNextDispositionActionTest.class }) public class DispositionTestSuite { diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/MultipleSchedulesTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/MultipleSchedulesTest.java new file mode 100644 index 0000000000..f166c97a1f --- /dev/null +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/MultipleSchedulesTest.java @@ -0,0 +1,176 @@ +package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition; + +import static org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest.test; + +import java.io.Serializable; +import java.util.Calendar; +import java.util.Date; +import java.util.Map; + +import org.alfresco.model.ContentModel; +import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction; +import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils; +import org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.namespace.QName; +import org.alfresco.util.ApplicationContextHelper; +import org.springframework.extensions.webscripts.GUID; + +import com.google.common.collect.ImmutableMap; + +public class MultipleSchedulesTest extends BaseRMTestCase +{ + /** A unique prefix for the constants in this test. */ + protected static final String TEST_PREFIX = MultipleSchedulesTest.class.getName() + GUID.generate() + "_"; + /** The name to use for the first category. */ + protected static final String CATEGORY_A_NAME = TEST_PREFIX + "CategoryA"; + /** The name to use for the folder within the first category. 
*/ + protected static final String FOLDER_A_NAME = TEST_PREFIX + "FolderA"; + /** The name to use for the second category. */ + protected static final String CATEGORY_B_NAME = TEST_PREFIX + "CategoryB"; + /** The name to use for the folder within the second category. */ + protected static final String FOLDER_B_NAME = TEST_PREFIX + "FolderB"; + /** The name to use for the record. */ + protected static final String RECORD_NAME = TEST_PREFIX + "Record"; + + /** The internal disposition service is used to avoid permissions issues when updating the record. */ + private DispositionService internalDispositionService; + + /** The first category node. */ + private NodeRef categoryA; + /** The folder node within the first category. */ + private NodeRef folderA; + /** The second category node. */ + private NodeRef categoryB; + /** The folder node within the second category. */ + private NodeRef folderB; + /** The record node. */ + private NodeRef record; + + @Override + protected void setUp() throws Exception + { + super.setUp(); + + BehaviourTest.initBehaviourTests(retryingTransactionHelper); + + // Get the application context + applicationContext = ApplicationContextHelper.getApplicationContext(getConfigLocations()); + internalDispositionService = (DispositionService) applicationContext.getBean("dispositionService"); + + // Ensure different records are used for each test. + record = null; + } + + /** + * Create two categories each containing a folder. Set up a schedule on category A that applies to records (cutoff + * immediately, destroy immediately). Set up a schedule on category B that is the same, but with a week delay before + * destroy becomes eligible. + */ + private void setUpFilePlan() + { + // Only set up the file plan if it hasn't already been done. + if (categoryA != null) + { + return; + } + + // Create two categories. + categoryA = filePlanService.createRecordCategory(filePlan, CATEGORY_A_NAME); + categoryB = filePlanService.createRecordCategory(filePlan, CATEGORY_B_NAME); + // Create a disposition schedule for category A (Cut off immediately, then Destroy immediately). + DispositionSchedule dispSchedA = utils.createBasicDispositionSchedule(categoryA, "instructions", "authority", true, false); + Map cutOffParamsA = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY); + dispositionService.addDispositionActionDefinition(dispSchedA, cutOffParamsA); + Map destroyParamsA = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY); + dispositionService.addDispositionActionDefinition(dispSchedA, destroyParamsA); + // Create a disposition schedule for category B (Cut off immediately, then Destroy one week after cutoff). 
+ DispositionSchedule dispSchedB = utils.createBasicDispositionSchedule(categoryB, "instructions", "authority", true, false); + Map cutOffParamsB = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY); + dispositionService.addDispositionActionDefinition(dispSchedB, cutOffParamsB); + Map destroyParamsB = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME, + PROP_DISPOSITION_DESCRIPTION, "description", + PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_ONE_WEEK, + PROP_DISPOSITION_PERIOD_PROPERTY, PROP_CUT_OFF_DATE); + dispositionService.addDispositionActionDefinition(dispSchedB, destroyParamsB); + // Create a folder within each category. + folderA = recordFolderService.createRecordFolder(categoryA, FOLDER_A_NAME); + folderB = recordFolderService.createRecordFolder(categoryB, FOLDER_B_NAME); + } + + /** + * RM-2526 + *

+     * Given a record subject to a disposition schedule
+     * And it is linked to a disposition schedule with the same step order, but a longer destroy step
+     * When the record is moved onto the destroy step
+     * Then the "as of" date is calculated using the longer period.
+     * 
+     */
+    public void testLinkedToLongerSchedule()
+    {
+        Calendar calendar = Calendar.getInstance();
+        test()
+            .given(() -> {
+                setUpFilePlan();
+                // Create a record filed under category A and linked to category B.
+                record = fileFolderService.create(folderA, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef();
+                recordService.link(record, folderB);
+            })
+            .when(() -> {
+                // Cut off the record.
+                dispositionService.cutoffDisposableItem(record);
+                // Ensure the update has been applied to the record.
+                internalDispositionService.updateNextDispositionAction(record);
+                calendar.setTime((Date) nodeService.getProperty(record, PROP_CUT_OFF_DATE));
+                calendar.add(Calendar.WEEK_OF_YEAR, 1);
+            })
+            .then()
+                .expect(calendar.getTime())
+                .from(() -> dispositionService.getNextDispositionAction(record).getAsOfDate())
+                .because("Record should follow largest retention schedule period, which is one week.");
+    }
+
+    /**
+     * RM-2526
+     *

+     * Given a record subject to a disposition schedule
+     * And it is linked to a disposition schedule with the same step order, but a shorter destroy step
+     * When the record is moved onto the destroy step
+     * Then the "as of" date is still calculated using the longer of the two periods.
+     * 
+     */
+    public void testLinkedToShorterSchedule()
+    {
+        Calendar calendar = Calendar.getInstance();
+        test()
+            .given(() -> {
+                setUpFilePlan();
+                // Create a record filed under category B and linked to category A.
+                record = fileFolderService.create(folderB, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef();
+                recordService.link(record, folderA);
+            })
+            .when(() -> {
+                // Cut off the record.
+                dispositionService.cutoffDisposableItem(record);
+                // Ensure the update has been applied to the record.
+                internalDispositionService.updateNextDispositionAction(record);
+                calendar.setTime((Date) nodeService.getProperty(record, PROP_CUT_OFF_DATE));
+                calendar.add(Calendar.WEEK_OF_YEAR, 1);
+            })
+            .then()
+                .expect(calendar.getTime())
+                .from(() -> dispositionService.getNextDispositionAction(record).getAsOfDate())
+                .because("Record should follow largest retention schedule period, which is one week.");
+    }
+}
diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateNextDispositionActionTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateNextDispositionActionTest.java
new file mode 100644
index 0000000000..d8dbb0b078
--- /dev/null
+++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/disposition/UpdateNextDispositionActionTest.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2005-2014 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
+ */ +package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition; + +import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_DESCRIPTION; +import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_INSTRUCTIONS; +import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_EVENT_NAME; +import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.PERIOD_ONE_WEEK; +import static org.alfresco.util.GUID.generate; + +import java.io.Serializable; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction; +import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction; +import org.alfresco.module.org_alfresco_module_rm.action.impl.EditDispositionActionAsOfDateAction; +import org.alfresco.module.org_alfresco_module_rm.action.impl.TransferAction; +import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; +import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; +import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.namespace.QName; + +/** +* Update next disposition step integration tests. +* +* @author Roxana Lucanu +* @since 2.3.1 +*/ +public class UpdateNextDispositionActionTest extends BaseRMTestCase +{ + /** + * Given a record with multiple dispositions + * When updating the next step + * Then the action is available + *

+ * relates to https://issues.alfresco.com/jira/browse/RM-3060 + */ + public void testUpdateNextDispositionAction_RM3060() throws Exception + { + doBehaviourDrivenTest(new BehaviourDrivenTest() + { + NodeRef record; + NodeRef folder2; + + @Override + public void given() + { + // create category1 + NodeRef category1 = filePlanService.createRecordCategory(filePlan, generate()); + + // create disposition schedule for category1 + createDispositionSchedule(category1); + + // create category2 + NodeRef category2 = filePlanService.createRecordCategory(filePlan, generate()); + + // create disposition schedule for category2 + createDispositionSchedule(category2); + + // create folder2 inside category2 + folder2 = recordFolderService.createRecordFolder(category2, generate()); + + // create folder1 inside category1 + NodeRef folder1 = recordFolderService.createRecordFolder(category1, generate()); + + // create record inside folder1 + record = utils.createRecord(folder1, generate(), generate()); + + } + @Override + public void when() throws Exception + { + // link the record to folder2 + recordService.link(record, folder2); + + // complete record + utils.completeRecord(record); + + // cut off + rmActionService.executeRecordsManagementAction(record, CutOffAction.NAME, null); + } + + @Override + public void then() throws Exception + { + assertTrue("Record " + record + " doesn't have the cutOff aspect.", nodeService.hasAspect(record, ASPECT_CUT_OFF)); + } + }); + } + + private void createDispositionSchedule(NodeRef category) + { + DispositionSchedule ds = utils.createDispositionSchedule(category, DEFAULT_DISPOSITION_INSTRUCTIONS, DEFAULT_DISPOSITION_DESCRIPTION, true, false, false); + + // create the properties for CUTOFF action and add it to the disposition action definition + Map cutOff = new HashMap(3); + cutOff.put(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME); + cutOff.put(PROP_DISPOSITION_DESCRIPTION, generate()); + cutOff.put(PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY); + dispositionService.addDispositionActionDefinition(ds, cutOff); + + // create the properties for TRANSFER action and add it to the disposition action definition + Map transfer = new HashMap(3); + transfer.put(PROP_DISPOSITION_ACTION_NAME, TransferAction.NAME); + transfer.put(PROP_DISPOSITION_DESCRIPTION, generate()); + transfer.put(PROP_DISPOSITION_EVENT, (Serializable)Collections.singletonList(DEFAULT_EVENT_NAME)); + dispositionService.addDispositionActionDefinition(ds, transfer); + + // create the properties for DESTROY action and add it to the disposition action definition + Map destroy = new HashMap(3); + destroy.put(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME); + destroy.put(PROP_DISPOSITION_DESCRIPTION, generate()); + destroy.put(PROP_DISPOSITION_PERIOD, PERIOD_ONE_WEEK); + dispositionService.addDispositionActionDefinition(ds, destroy); + } +} \ No newline at end of file diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java index 5e5dd27b0a..30ff940467 100755 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/IssueTestSuite.java @@ -42,7 +42,6 @@ import org.junit.runners.Suite.SuiteClasses; RM452Test.class, RM804Test.class, RM994Test.class, - RM1039Test.class, RM1799Test.class, 
RM1814Test.class, RM978Test.class, diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1039Test.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1039Test.java deleted file mode 100644 index 8ef86eef6e..0000000000 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/issue/RM1039Test.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright (C) 2005-2013 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.module.org_alfresco_module_rm.test.integration.issue; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; - -import net.sf.acegisecurity.vote.AccessDecisionVoter; - -import org.alfresco.model.ContentModel; -import org.alfresco.module.org_alfresco_module_rm.action.impl.CompleteEventAction; -import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction; -import org.alfresco.module.org_alfresco_module_rm.capability.Capability; -import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction; -import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase; -import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils; -import org.alfresco.service.cmr.repository.NodeRef; - - -/** - * Unit test for RM-1039 ... 
can't move a folder into a category with a disposition schedule - * - * @author Roy Wetherall - * @since 2.1 - */ -public class RM1039Test extends BaseRMTestCase -{ - @Override - protected boolean isRecordTest() - { - return true; - } - - // try and move a folder from no disposition schedule to a disposition schedule - public void testMoveRecordFolderFromNoDisToDis() throws Exception - { - final NodeRef recordFolder = doTestInTransaction(new Test() - { - @Override - public NodeRef run() - { - // create a record category (no disposition schedule) - NodeRef recordCategory = filePlanService.createRecordCategory(filePlan, "Caitlin Reed"); - - // create a record folder - return recordFolderService.createRecordFolder(recordCategory, "Grace Wetherall"); - } - - @Override - public void test(NodeRef result) throws Exception - { - assertNotNull(result); - assertNull(dispositionService.getDispositionSchedule(result)); - assertFalse(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE)); - } - }); - - final NodeRef record = doTestInTransaction(new Test() - { - @Override - public NodeRef run() - { - // create a record - return fileFolderService.create(recordFolder, "mytest.txt", ContentModel.TYPE_CONTENT).getNodeRef(); - } - - @Override - public void test(NodeRef result) throws Exception - { - assertNotNull(result); - assertNull(dispositionService.getDispositionSchedule(result)); - assertFalse(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE)); - } - }); - - doTestInTransaction(new Test() - { - @Override - public NodeRef run() throws Exception - { - Capability capability = capabilityService.getCapability("CreateModifyDestroyFolders"); - assertEquals(AccessDecisionVoter.ACCESS_GRANTED, capability.evaluate(recordFolder)); - assertEquals(AccessDecisionVoter.ACCESS_GRANTED, capability.evaluate(recordFolder, rmContainer)); - - // take a look at the move capability - Capability moveCapability = capabilityService.getCapability("Move"); - assertEquals(AccessDecisionVoter.ACCESS_GRANTED, moveCapability.evaluate(recordFolder, rmContainer)); - - // move the node - return fileFolderService.move(recordFolder, rmContainer, null).getNodeRef(); - } - - @Override - public void test(NodeRef result) throws Exception - { - assertNotNull(result); - assertNotNull(dispositionService.getDispositionSchedule(result)); - assertTrue(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE)); - - DispositionAction dispositionAction = dispositionService.getNextDispositionAction(result); - assertNotNull(dispositionAction); - - assertNull(dispositionAction.getAsOfDate()); - assertEquals("cutoff", dispositionAction.getName()); - assertEquals(1, dispositionAction.getEventCompletionDetails().size()); - - // take a look at the record and check things are as we would expect - assertFalse(nodeService.hasAspect(record, ASPECT_DISPOSITION_LIFECYCLE)); - } - }); - } - - // move from a disposition schedule to another .. both record folder level - - // move from a disposition schedule to another .. 
from record to folder level - - - // try and move a cutoff folder - public void testMoveCutoffRecordFolder() throws Exception - { - final NodeRef destination = doTestInTransaction(new Test() - { - @Override - public NodeRef run() - { - // create a record category (no disposition schedule) - return filePlanService.createRecordCategory(filePlan, "Caitlin Reed"); - } - }); - - final NodeRef testFolder = doTestInTransaction(new Test() - { - @Override - public NodeRef run() - { - // create folder - NodeRef testFolder = recordFolderService.createRecordFolder(rmContainer, "Peter Edward Francis"); - - // complete event - Map params = new HashMap(1); - params.put(CompleteEventAction.PARAM_EVENT_NAME, CommonRMTestUtils.DEFAULT_EVENT_NAME); - rmActionService.executeRecordsManagementAction(testFolder, CompleteEventAction.NAME, params); - - // cutoff folder - rmActionService.executeRecordsManagementAction(testFolder, CutOffAction.NAME); - - return testFolder; - } - - @Override - public void test(NodeRef result) throws Exception - { - // take a look at the move capability - Capability moveCapability = capabilityService.getCapability("Move"); - assertEquals(AccessDecisionVoter.ACCESS_DENIED, moveCapability.evaluate(result, destination)); - - } - }); - - doTestInTransaction(new FailureTest() - { - @Override - public void run() throws Exception - { - fileFolderService.move(testFolder, destination, null).getNodeRef(); - } - }); - } -} diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/recordfolder/MoveRecordFolderTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/recordfolder/MoveRecordFolderTest.java index d8ecd511ed..328a31b5e6 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/recordfolder/MoveRecordFolderTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/integration/recordfolder/MoveRecordFolderTest.java @@ -384,9 +384,12 @@ public class MoveRecordFolderTest extends BaseRMTestCase } }); } - - - // try and move a cutoff folder + + /** + * Try and move a cutoff folder + * + * @see https://issues.alfresco.com/jira/browse/RM-1039 + */ public void testMoveCutoffRecordFolder() throws Exception { final NodeRef destination = doTestInTransaction(new Test() diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/legacy/service/DispositionServiceImplTest.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/legacy/service/DispositionServiceImplTest.java index e3853bfc30..29ab764296 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/legacy/service/DispositionServiceImplTest.java +++ b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/legacy/service/DispositionServiceImplTest.java @@ -657,7 +657,7 @@ public class DispositionServiceImplTest extends BaseRMTestCase checkDisposableItemChanged(mhRecordFolder42); checkDisposableItemChanged(record43); checkDisposableItemUnchanged(mhRecordFolder44); - checkDisposableItemUnchanged(record45);; + checkDisposableItemUnchanged(record45); } }); diff --git a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java index 275dc089da..c7b0d2ef99 100644 --- a/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java +++ 
b/rm-server/test/java/org/alfresco/module/org_alfresco_module_rm/test/util/CommonRMTestUtils.java @@ -76,6 +76,7 @@ public class CommonRMTestUtils implements RecordsManagementModel public static final String DEFAULT_EVENT_NAME = "case_closed"; public static final String PERIOD_NONE = "none|0"; public static final String PERIOD_IMMEDIATELY = "immediately|0"; + public static final String PERIOD_ONE_WEEK = "week|1"; public static final String PERIOD_ONE_YEAR = "year|1"; public static final String PERIOD_THREE_YEARS = "year|3"; diff --git a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImplUnitTest.java b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImplUnitTest.java index 76b0f0cf11..38e0a3e4f6 100755 --- a/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImplUnitTest.java +++ b/rm-server/unit-test/java/org/alfresco/module/org_alfresco_module_rm/record/RecordServiceImplUnitTest.java @@ -263,7 +263,7 @@ public class RecordServiceImplUnitTest extends BaseUnitTest DispositionSchedule recordDispositionSchedule = mock(DispositionSchedule.class); when(recordDispositionSchedule.isRecordLevelDisposition()) .thenReturn(true); - when(mockedDispositionService.getDispositionSchedule(record)) + when(mockedDispositionService.getOriginDispositionSchedule(record)) .thenReturn(recordDispositionSchedule); DispositionSchedule recordFolderDispositionSchedule = mock(DispositionSchedule.class);
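Note on the PERIOD_ONE_WEEK constant added to CommonRMTestUtils above: disposition periods in these tests are expressed as "periodType|expression" strings (compare the existing PERIOD_IMMEDIATELY = "immediately|0" and PERIOD_ONE_YEAR = "year|1"), so "week|1" means one week after the base date. As a rough, self-contained illustration of the date arithmetic the MultipleSchedulesTest assertions rely on (cut-off date plus one week), here is a plain-JDK sketch; it is not part of the patch, and the class and helper names (OneWeekPeriodSketch, expectedAsOfDate) are hypothetical.

import java.util.Calendar;
import java.util.Date;

/**
 * Illustrative sketch only: shows the date arithmetic implied by the
 * "week|1" period string behind PERIOD_ONE_WEEK, mirroring the Calendar
 * handling used in MultipleSchedulesTest.
 */
public class OneWeekPeriodSketch
{
    /** Hypothetical helper: expected "as of" date for a "week|1" period, given a cut-off date. */
    public static Date expectedAsOfDate(Date cutOffDate)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(cutOffDate);
        // "week|1" => one week after the base (cut-off) date
        calendar.add(Calendar.WEEK_OF_YEAR, 1);
        return calendar.getTime();
    }

    public static void main(String[] args)
    {
        Date cutOff = new Date();
        System.out.println("Cut-off date:        " + cutOff);
        System.out.println("Expected as-of date: " + expectedAsOfDate(cutOff));
    }
}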