RM-4247 Merge release/V2.3 into feature-2.3/RM-4247_UpdateDispositionProperty.

This commit is contained in:
Tom Page
2016-11-04 11:09:17 +00:00
28 changed files with 1600 additions and 1155 deletions

20
pom.xml
View File

@@ -4,7 +4,7 @@
<groupId>org.alfresco</groupId> <groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-parent</artifactId> <artifactId>alfresco-rm-parent</artifactId>
<packaging>pom</packaging> <packaging>pom</packaging>
<version>2.3.1-SNAPSHOT</version> <version>2.3.2-SNAPSHOT</version>
<name>Alfresco Records Management</name> <name>Alfresco Records Management</name>
<url>http://www.alfresco.org/</url> <url>http://www.alfresco.org/</url>
@@ -60,9 +60,27 @@
</snapshotRepository> </snapshotRepository>
</distributionManagement> </distributionManagement>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-platform-distribution</artifactId>
<version>5.0.2</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<version>0.9.10</version>
</dependency>
</dependencies>
</dependencyManagement>
<modules> <modules>
<module>rm-server</module> <module>rm-server</module>
<module>rm-share</module> <module>rm-share</module>
<module>rm-automation</module>
</modules> </modules>
<properties> <properties>

View File

@@ -1,312 +1,360 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.alfresco</groupId> <modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-rm-parent</artifactId> <artifactId>alfresco-rm-automation</artifactId>
<version>2.3.1-SNAPSHOT</version> <name>Alfresco Records Management Automation</name>
</parent>
<modelVersion>4.0.0</modelVersion> <parent>
<artifactId>alfresco-rm-automation</artifactId> <groupId>org.alfresco</groupId>
<properties> <artifactId>alfresco-rm-parent</artifactId>
<selenium.version>2.43.1</selenium.version> <version>2.3.2-SNAPSHOT</version>
<spring.version>4.0.5.RELEASE</spring.version> </parent>
</properties>
<build> <properties>
<plugins> <selenium.version>2.45.0</selenium.version>
<!-- Additional source folder to be added: source/compatibility --> <spring.version>4.0.5.RELEASE</spring.version>
<maven.build.sourceVersion>1.8</maven.build.sourceVersion>
<suiteXmlFile>testng.xml</suiteXmlFile>
<skip.automationtests>true</skip.automationtests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-test-source</id>
<goals>
<goal>add-test-source</goal>
</goals>
<configuration>
<sources>
<source>src/unit-test/java</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<redirectTestOutputToFile>false</redirectTestOutputToFile>
<suiteXmlFiles>
<suiteXmlFile>${project.build.testOutputDirectory}/${suiteXmlFile}</suiteXmlFile>
</suiteXmlFiles>
<skipTests>${skip.automationtests}</skipTests>
</configuration>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>default-cli</id>
<configuration>
<target>
<echo>Stopping Alfresco...</echo>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true">
<arg value="stop" />
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>license-maven-plugin</artifactId>
<configuration>
<licenseName>alfresco_enterprise</licenseName>
<licenseResolver>file:${project.parent.basedir}/license</licenseResolver>
<descriptionTemplate>${project.parent.basedir}/license/description.ftl</descriptionTemplate>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin> <plugin>
<groupId>org.codehaus.mojo</groupId> <groupId>org.eclipse.m2e</groupId>
<artifactId>build-helper-maven-plugin</artifactId> <artifactId>lifecycle-mapping</artifactId>
<executions> <version>1.0.0</version>
<execution> <configuration>
<id>add-test-source</id> <lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.codehaus.mojo</groupId>
<artifactId>license-maven-plugin</artifactId>
<versionRange>[1.8,)</versionRange>
<goals>
<goal>update-file-header</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore />
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<dependency>
<groupId>org.alfresco.test</groupId>
<artifactId>dataprep</artifactId>
<version>1.8</version>
</dependency>
<dependency>
<groupId>org.alfresco.test</groupId>
<artifactId>alfresco-testng</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>selenium-grid</artifactId>
<version>1.8</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.8</version>
</dependency>
<dependency>
<groupId>ru.yandex.qatools.htmlelements</groupId>
<artifactId>htmlelements-all</artifactId>
<version>1.15</version>
</dependency>
<dependency>
<groupId>ru.yandex.qatools.properties</groupId>
<artifactId>properties-loader</artifactId>
<version>1.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>1.56</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>1.7.21</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>install-alfresco</id>
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>fetch-installer</id>
<phase>generate-test-resources</phase>
<goals> <goals>
<goal>add-test-source</goal> <goal>run</goal>
</goals> </goals>
<configuration> <configuration>
<sources> <target>
<source>src/unit-test/java</source> <echo>Recreating database...</echo>
</sources> <sql driver="org.postgresql.Driver" url="jdbc:postgresql:template1" userid="alfresco" password="alfresco" autocommit="true">drop database if exists alfresco; create database alfresco</sql>
<echo>Downloading Alfresco installer...</echo>
<get src="https://releases.alfresco.com/${installer.path}" dest="target/alf-installer.bin" />
<chmod file="target/alf-installer.bin" perm="a+x" verbose="true" />
<echo>Installing Alfresco...</echo>
<exec executable="${basedir}/target/alf-installer.bin" dir="target" failonerror="true">
<arg line="--mode unattended --alfresco_admin_password admin --disable-components postgres,alfrescowcmqs --jdbc_username alfresco --jdbc_password alfresco --prefix ${basedir}/target/alf-installation" />
</exec>
</target>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>
</plugin> <dependencies>
<plugin> <dependency>
<artifactId>maven-surefire-plugin</artifactId> <groupId>org.apache.ant</groupId>
<configuration> <artifactId>ant-jsch</artifactId>
<redirectTestOutputToFile>false</redirectTestOutputToFile> <version>1.8.2</version>
<properties> </dependency>
<property> <dependency>
<name>usedefaultlisteners</name> <groupId>postgresql</groupId>
<value>false</value> <artifactId>postgresql</artifactId>
</property> <version>9.1-901-1.jdbc4</version>
<property> </dependency>
<name>listener</name> </dependencies>
<value>org.uncommons.reportng.HTMLReporter, org.uncommons.reportng.JUnitXMLReporter</value> </plugin>
</property> <plugin>
</properties> <artifactId>maven-dependency-plugin</artifactId>
<suiteXmlFiles> <executions>
<suiteXmlFile>${project.build.testOutputDirectory}/testng.xml</suiteXmlFile> <execution>
</suiteXmlFiles> <id>fetch-amps</id>
</configuration> <phase>process-test-resources</phase>
</plugin> <goals>
<plugin> <goal>copy</goal>
<!-- Configuration triggered by mvn antrun:run, used by Bamboo to stop server --> </goals>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>default-cli</id>
<configuration> <configuration>
<target> <artifactItems>
<echo>Stopping Alfresco...</echo> <artifactItem>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true"> <groupId>org.alfresco</groupId>
<arg value="stop" /> <artifactId>alfresco-rm-share</artifactId>
</exec> <version>${project.version}</version>
</target> <type>amp</type>
<classifier>amp</classifier>
</artifactItem>
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-server</artifactId>
<version>${project.version}</version>
<type>amp</type>
<classifier>amp</classifier>
</artifactItem>
</artifactItems>
<outputDirectory>${project.build.directory}/amps</outputDirectory>
<useBaseVersion>true</useBaseVersion>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
</plugins> <plugin>
</build> <groupId>org.alfresco.maven.plugin</groupId>
<dependencies> <artifactId>alfresco-maven-plugin</artifactId>
<dependency> <extensions>true</extensions>
<groupId>org.alfresco</groupId> <executions>
<artifactId>webdrone</artifactId> <execution>
<version>2.6.1</version> <id>install-server-amp</id>
</dependency> <goals>
<dependency> <goal>install</goal>
<groupId>org.seleniumhq.selenium</groupId> </goals>
<artifactId>selenium-java</artifactId> <phase>process-test-resources</phase>
<version>${selenium.version}</version>
</dependency>
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-server</artifactId>
<version>${selenium.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.uncommons</groupId>
<artifactId>reportng</artifactId>
<version>1.1.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ru.yandex.qatools.htmlelements</groupId>
<artifactId>htmlelements-all</artifactId>
<version>1.12</version>
</dependency>
<dependency>
<groupId>ru.yandex.qatools.properties</groupId>
<artifactId>properties-loader</artifactId>
<version>1.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>install-alfresco</id>
<build>
<plugins>
<!-- Download and install the latest enterprise alfresco installer -->
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>fetch-installer</id>
<phase>generate-test-resources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<echo>Recreating database...</echo>
<sql driver="org.postgresql.Driver" url="jdbc:postgresql:template1" userid="alfresco" password="alfresco" autocommit="true">drop database if exists alfresco; create database alfresco</sql>
<echo>Downloading Alfresco installer...</echo>
<sshexec username="tomcat" host="pbld01.alfresco.com" keyfile="${user.home}/.ssh/id_rsa" outputproperty="installerPath" command="ls -rt ${enterprise.installer.path} | tail -1 | tr ' ' '?' " />
<scp remoteFile="tomcat@pbld01.alfresco.com:${installerPath}" localTofile="target/alf-installer.bin" keyfile="${user.home}/.ssh/id_rsa" />
<chmod file="target/alf-installer.bin" perm="a+x" verbose="true" />
<echo>Installing Alfresco...</echo>
<exec executable="${basedir}/target/alf-installer.bin" dir="target" failonerror="true">
<arg line="--mode unattended --alfresco_admin_password admin --disable-components postgres,alfrescowcmqs --jdbc_username alfresco --jdbc_password alfresco --prefix ${basedir}/target/alf-installation" />
</exec>
</target>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant-jsch</artifactId>
<version>1.8.2</version>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.1-901-1.jdbc4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>fetch-amps</id>
<phase>process-test-resources</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-share</artifactId>
<version>${project.version}</version>
<classifier>amp</classifier>
<type>amp</type>
</artifactItem>
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-server</artifactId>
<version>${project.version}</version>
<classifier>amp</classifier>
<type>amp</type>
</artifactItem>
</artifactItems>
<outputDirectory>${project.build.directory}/amps</outputDirectory>
<useBaseVersion>true</useBaseVersion>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.alfresco.maven.plugin</groupId>
<artifactId>alfresco-maven-plugin</artifactId>
<extensions>true</extensions>
<executions>
<execution>
<id>install-server-amp</id>
<goals>
<goal>install</goal>
</goals>
<phase>process-test-resources</phase>
<configuration>
<backup>true</backup>
<ampLocation>${project.build.directory}/amps/alfresco-rm-server-${project.version}-amp.amp</ampLocation>
<warLocation>${project.build.directory}/alf-installation/tomcat/webapps/alfresco.war</warLocation>
<classifier>amp</classifier>
</configuration>
</execution>
<execution>
<id>install-share-amp</id>
<goals>
<goal>install</goal>
</goals>
<phase>process-test-resources</phase>
<configuration>
<backup>true</backup>
<ampLocation>${project.build.directory}/amps/alfresco-rm-share-${project.version}-amp.amp</ampLocation>
<warLocation>${project.build.directory}/alf-installation/tomcat/webapps/share.war</warLocation>
<classifier>amp</classifier>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>run-alfresco</id>
<build>
<plugins>
<!-- Fetch JaCoCo agent and set the argLine property accordingly -->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.6.3.201306030806</version>
<executions>
<execution>
<id>prepare-jacoco</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
</executions>
<configuration> <configuration>
<includes> <backup>true</backup>
<include>org.alfresco.*</include> <ampLocation>${project.build.directory}/amps/alfresco-rm-server-${project.version}-amp.amp</ampLocation>
</includes> <warLocation>${project.build.directory}/alf-installation/tomcat/webapps/alfresco.war</warLocation>
</configuration> </configuration>
</plugin> </execution>
<!-- Starts/stop the installed Alfresco --> <execution>
<plugin> <id>install-share-amp</id>
<artifactId>maven-antrun-plugin</artifactId> <goals>
<executions> <goal>install</goal>
<execution> </goals>
<id>start-alfresco</id> <phase>process-test-resources</phase>
<phase>process-test-classes</phase> <configuration>
<goals> <backup>true</backup>
<goal>run</goal> <ampLocation>${project.build.directory}/amps/alfresco-rm-share-${project.version}-amp.amp</ampLocation>
</goals> <warLocation>${project.build.directory}/alf-installation/tomcat/webapps/share.war</warLocation>
<configuration> </configuration>
<target> </execution>
<echo>Starting Alfresco...</echo> </executions>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true"> </plugin>
<arg value="start" /> </plugins>
<env key="CATALINA_OPTS" value="${argLine}" /> </build>
</exec> </profile>
<sleep minutes="5" /> <profile>
</target> <id>run-alfresco</id>
</configuration> <build>
</execution> <plugins>
<execution> <plugin>
<id>stop-alfresco</id> <groupId>org.jacoco</groupId>
<phase>post-integration-test</phase> <artifactId>jacoco-maven-plugin</artifactId>
<goals> <version>0.7.5.201505241946</version>
<goal>run</goal> <executions>
</goals> <execution>
<configuration> <id>prepare-jacoco</id>
<target> <goals>
<echo>Stopping Alfresco...</echo> <goal>prepare-agent</goal>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true"> </goals>
<arg value="stop" /> </execution>
</exec> </executions>
</target> <configuration>
</configuration> <includes>
</execution> <include>org.alfresco.*</include>
</executions> </includes>
</plugin> </configuration>
</plugins> </plugin>
</build> <plugin>
</profile> <artifactId>maven-antrun-plugin</artifactId>
</profiles> <executions>
</project> <execution>
<id>start-alfresco</id>
<phase>process-test-classes</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<echo>Starting Alfresco...</echo>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true">
<arg value="start" />
<env key="CATALINA_OPTS" value="${argLine}" />
</exec>
<sleep minutes="5" />
</target>
</configuration>
</execution>
<execution>
<id>stop-alfresco</id>
<phase>post-integration-test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<echo>Stopping Alfresco...</echo>
<exec executable="${basedir}/target/alf-installation/alfresco.sh" dir="target/alf-installation" failonerror="true">
<arg value="stop" />
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -72,6 +72,12 @@ rm.completerecord.mandatorypropertiescheck.enabled=true
# #
rm.patch.v22.convertToStandardFilePlan=false rm.patch.v22.convertToStandardFilePlan=false
# Permission mapping
# These take a comma-separated string of permissions from org.alfresco.service.cmr.security.PermissionService
# read maps to ReadRecords and write to FileRecords
rm.haspermissionmap.read=ReadProperties,ReadChildren
rm.haspermissionmap.write=WriteProperties,AddChildren
# #
# Extended auto-version behaviour. If true and other auto-version properties are satisfied, then # Extended auto-version behaviour. If true and other auto-version properties are satisfied, then
# a document will be auto-versioned when its type is changed. # a document will be auto-versioned when its type is changed.

View File

@@ -138,6 +138,12 @@
<property name="filePlanService"> <property name="filePlanService">
<ref bean="filePlanService" /> <ref bean="filePlanService" />
</property> </property>
<property name="configuredReadPermissions">
<value>${rm.haspermissionmap.read}</value>
</property>
<property name="configuredFilePermissions">
<value>${rm.haspermissionmap.write}</value>
</property>
</bean> </bean>
<bean id="extendedReaderDynamicAuthority" class="org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority" /> <bean id="extendedReaderDynamicAuthority" class="org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority" />

View File

@@ -392,6 +392,11 @@
<type>d:date</type> <type>d:date</type>
<mandatory>false</mandatory> <mandatory>false</mandatory>
</property> </property>
<property name="rma:manuallySetAsOf">
<title>Manually Set Disposition Date Flag</title>
<type>d:boolean</type>
<default>false</default>
</property>
<property name="rma:dispositionEventsEligible"> <property name="rma:dispositionEventsEligible">
<title>Disposition Events Eligible</title> <title>Disposition Events Eligible</title>
<type>d:boolean</type> <type>d:boolean</type>

View File

@@ -39,12 +39,6 @@
<bean id="RecordsManagementServiceRegistry" class="org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistryImpl" /> <bean id="RecordsManagementServiceRegistry" class="org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistryImpl" />
<!-- Disposition selection strategy -->
<bean id="org_alfresco_module_rm_dispositionSelectionStrategy"
class="org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSelectionStrategy" >
<property name="dispositionService" ref="dispositionService"/>
</bean>
<!-- Vital Record Service --> <!-- Vital Record Service -->
<bean id="vitalRecordService" parent="baseService" class="org.alfresco.module.org_alfresco_module_rm.vital.VitalRecordServiceImpl"> <bean id="vitalRecordService" parent="baseService" class="org.alfresco.module.org_alfresco_module_rm.vital.VitalRecordServiceImpl">
@@ -96,9 +90,6 @@
<property name="recordFolderService" ref="RecordFolderService"/> <property name="recordFolderService" ref="RecordFolderService"/>
<property name="recordService" ref="RecordService"/> <property name="recordService" ref="RecordService"/>
<property name="freezeService" ref="FreezeService"/> <property name="freezeService" ref="FreezeService"/>
<property name="dispositionSelectionStrategy">
<ref local="org_alfresco_module_rm_dispositionSelectionStrategy" />
</property>
</bean> </bean>
<bean id="DispositionService" class="org.springframework.aop.framework.ProxyFactoryBean"> <bean id="DispositionService" class="org.springframework.aop.framework.ProxyFactoryBean">
@@ -139,6 +130,7 @@
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.registerDispositionProperty=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.registerDispositionProperty=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionProperties=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionProperties=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionSchedule=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getDispositionSchedule=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getOriginDispositionSchedule=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedDispositionSchedule=RM.Read.0 org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedDispositionSchedule=RM.Read.0
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedRecordsManagementContainer=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.getAssociatedRecordsManagementContainer=RM_ALLOW
org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.hasDisposableItems=RM_ALLOW org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService.hasDisposableItems=RM_ALLOW

View File

@@ -5,10 +5,10 @@
URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.<br/> URL parameter batchsize is mandatory, and represents the maximum number of records that can be processed in one transaction.<br/>
URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.<br/> URL parameter maxProcessedRecords is optional, and represents the maximum number of records that will be processed in one request.<br/>
URL parameter export is optional, and if its value is true, it will export the processed records into a CSV file.<br/> URL parameter export is optional, and if its value is true, it will export the processed records into a CSV file.<br/>
URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.<br/> URL parameter parentNodeRef is optional, and represents the nodeRef of the folder that contains the records to be processed.<br/>
]]> ]]>
</description> </description>
<url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}&amp;export={export?}&amp;parentNodeRef={parentNodeRef?}</url> <url>/api/rm/rm-dynamicauthorities?batchsize={batchsize}&amp;maxProcessedRecords={maxProcessedRecords?}&amp;export={export?}&amp;parentNodeRef={parentNodeRef?}</url>
<format default="json">argument</format> <format default="json">argument</format>
<authentication>admin</authentication> <authentication>admin</authentication>
<transaction allow="readonly">required</transaction> <transaction allow="readonly">required</transaction>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>org.alfresco</groupId> <groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-parent</artifactId> <artifactId>alfresco-rm-parent</artifactId>
<version>2.3.1-SNAPSHOT</version> <version>2.3.2-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-rm-server</artifactId> <artifactId>alfresco-rm-server</artifactId>
@@ -19,7 +19,13 @@
<resources> <resources>
<resource> <resource>
<directory>config</directory> <directory>config</directory>
<filtering>true</filtering> <filtering>true</filtering>
<includes>
<include>**/module.properties</include>
</includes>
</resource>
<resource>
<directory>config</directory>
</resource> </resource>
</resources> </resources>
<testResources> <testResources>
@@ -225,7 +231,6 @@
<dependency> <dependency>
<groupId>org.mockito</groupId> <groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId> <artifactId>mockito-all</artifactId>
<version>1.9.5</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>

View File

@@ -18,6 +18,8 @@
*/ */
package org.alfresco.module.org_alfresco_module_rm.action.impl; package org.alfresco.module.org_alfresco_module_rm.action.impl;
import static org.apache.commons.lang3.BooleanUtils.isNotTrue;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
@@ -185,7 +187,8 @@ public class BroadcastDispositionActionDefinitionUpdateAction extends RMActionEx
{ {
// the change does effect the nextAction for this node // the change does effect the nextAction for this node
// so go ahead and determine what needs updating // so go ahead and determine what needs updating
if (changedProps.contains(PROP_DISPOSITION_PERIOD) || changedProps.contains(PROP_DISPOSITION_PERIOD_PROPERTY)) if ((changedProps.contains(PROP_DISPOSITION_PERIOD) || changedProps.contains(PROP_DISPOSITION_PERIOD_PROPERTY))
&& isNotTrue((Boolean) getNodeService().getProperty(nextAction.getNodeRef(), PROP_MANUALLY_SET_AS_OF)))
{ {
persistPeriodChanges(dispositionActionDef, nextAction); persistPeriodChanges(dispositionActionDef, nextAction);
} }

View File

@@ -38,6 +38,9 @@ public class EditDispositionActionAsOfDateAction extends RMActionExecuterAbstrac
private static final String MSG_VALID_DATE_DISP_ASOF = "rm.action.valid-date-disp-asof"; private static final String MSG_VALID_DATE_DISP_ASOF = "rm.action.valid-date-disp-asof";
private static final String MSG_DISP_ASOF_LIFECYCLE_APPLIED = "rm.action.disp-asof-lifecycle-applied"; private static final String MSG_DISP_ASOF_LIFECYCLE_APPLIED = "rm.action.disp-asof-lifecycle-applied";
/** Action name */
public static final String NAME = "editDispositionActionAsOfDate";
/** Action parameters */ /** Action parameters */
public static final String PARAM_AS_OF_DATE = "asOfDate"; public static final String PARAM_AS_OF_DATE = "asOfDate";
@@ -62,6 +65,7 @@ public class EditDispositionActionAsOfDateAction extends RMActionExecuterAbstrac
if (da != null) if (da != null)
{ {
getNodeService().setProperty(da.getNodeRef(), PROP_DISPOSITION_AS_OF, asOfDate); getNodeService().setProperty(da.getNodeRef(), PROP_DISPOSITION_AS_OF, asOfDate);
getNodeService().setProperty(da.getNodeRef(), PROP_MANUALLY_SET_AS_OF, true);
} }
} }
else else

View File

@@ -1,199 +0,0 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.disposition;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This class offers the default implementation of a strategy for selection of
* disposition schedule for a record when there is more than one which is applicable.
* An example of where this strategy might be used would be in the case of a record
* which was multiply filed.
*
* @author neilm
*/
public class DispositionSelectionStrategy implements RecordsManagementModel
{
    /** Logger */
    private static final Log logger = LogFactory.getLog(DispositionSelectionStrategy.class);

    /** Disposition service */
    private DispositionService dispositionService;

    /**
     * Set the disposition service.
     *
     * @param dispositionService the disposition service
     */
    public void setDispositionService(DispositionService dispositionService)
    {
        this.dispositionService = dispositionService;
    }

    /**
     * Select the disposition schedule to use given there is more than one, e.g. for a
     * record that has been multiply filed.
     *
     * @param recordFolders the record folders the record is filed in (may be null or empty)
     * @return the node reference of the selected disposition schedule, or null if no
     *         folders were supplied or the selected folder has no disposition schedule
     */
    public NodeRef selectDispositionScheduleFrom(List<NodeRef> recordFolders)
    {
        if (recordFolders == null || recordFolders.isEmpty())
        {
            return null;
        }

        // DoD 5015.2, chapter 2:
        // Records assigned more than 1 disposition must be retained and linked to the record
        // folder (category) with the longest retention period.
        // Assumption: an event-based disposition action has a longer retention
        // period than a time-based one - as we cannot know when an event will occur.
        // TODO Automatic events?
        // NOTE(review): the comparator below orders folders so that a 'lesser' value means a
        // shorter retention period, yet first() is used to pick the folder - confirm this
        // matches the "longest retention period" requirement quoted above.
        NodeRef recordFolder;
        if (recordFolders.size() == 1)
        {
            recordFolder = recordFolders.get(0);
        }
        else
        {
            SortedSet<NodeRef> sortedFolders = new TreeSet<NodeRef>(new DispositionableNodeRefComparator());
            sortedFolders.addAll(recordFolders);
            recordFolder = sortedFolders.first();
        }

        DispositionSchedule dispSchedule = dispositionService.getDispositionSchedule(recordFolder);
        if (logger.isDebugEnabled())
        {
            logger.debug("Selected disposition schedule: " + dispSchedule);
        }

        return dispSchedule == null ? null : dispSchedule.getNodeRef();
    }

    /**
     * Defines a natural comparison order between NodeRefs that have the
     * dispositionLifecycle aspect applied.
     * A 'lesser' value is considered to have a shorter retention period, although the
     * actual retention period may not be straightforwardly determined in all cases.
     */
    class DispositionableNodeRefComparator implements Comparator<NodeRef>
    {
        public int compare(final NodeRef f1, final NodeRef f2)
        {
            // The comparison reads disposition data, so run it as the admin user
            // regardless of the calling user's permissions.
            return AuthenticationUtil.runAs(new RunAsWork<Integer>()
            {
                public Integer doWork()
                {
                    return compareImpl(f1, f2);
                }
            }, AuthenticationUtil.getAdminUserName());
        }

        private int compareImpl(NodeRef f1, NodeRef f2)
        {
            // Quick check: identical node references are trivially equal.
            if (f1.equals(f2))
            {
                return 0;
            }

            // Get the disposition schedules for the folders.
            DispositionSchedule ds1 = dispositionService.getDispositionSchedule(f1);
            DispositionSchedule ds2 = dispositionService.getDispositionSchedule(f2);

            // A folder without a disposition schedule sorts after one that has one.
            if (ds1 == null)
            {
                return (ds2 == null) ? 0 : 1;
            }
            else if (ds2 == null)
            {
                return -1;
            }

            // TODO this won't work correctly if we are trying to compare schedules that are record based!!
            DispositionAction da1 = dispositionService.getNextDispositionAction(f1);
            DispositionAction da2 = dispositionService.getNextDispositionAction(f2);

            if (da1 != null && da2 != null)
            {
                Date asOfDate1 = da1.getAsOfDate();
                Date asOfDate2 = da2.getAsOfDate();

                // If both record(Folder)s have asOfDates, then use these to compare.
                if (asOfDate1 != null && asOfDate2 != null)
                {
                    return asOfDate1.compareTo(asOfDate2);
                }
                // If one has a date and the other doesn't, the one with the date is "less".
                // (A defined date is 'shorter' than an undefined date, as an undefined date
                // means the record may - theoretically - be retained forever.)
                else if (asOfDate1 != null || asOfDate2 != null)
                {
                    return asOfDate1 == null ? +1 : -1;
                }
                else
                {
                    // Neither has an asOfDate. (Somewhat arbitrarily) we'll use the number
                    // of events to compare now.
                    DispositionActionDefinition dad1 = da1.getDispositionActionDefinition();
                    DispositionActionDefinition dad2 = da2.getDispositionActionDefinition();
                    int eventsCount1 = (dad1 == null) ? 0 : dad1.getEvents().size();
                    int eventsCount2 = (dad2 == null) ? 0 : dad2.getEvents().size();
                    // Integer.compare avoids the boxing done by Integer.valueOf(a).compareTo(b).
                    return Integer.compare(eventsCount1, eventsCount2);
                }
            }

            // NOTE(review): if exactly one folder has a next disposition action the folders
            // compare equal here; in a TreeSet this collapses them into a single entry -
            // confirm this is the intended behaviour.
            return 0;
        }
    }
}

View File

@@ -233,6 +233,17 @@ public interface DispositionService
* @param nodeRef node reference * @param nodeRef node reference
*/ */
void refreshDispositionAction(NodeRef nodeRef); void refreshDispositionAction(NodeRef nodeRef);
/**
* Gets date of the disposition action for the given
* disposition schedule with the given action name
*
* @param record
* @param dispositionSchedule nodeRef
* @param dispositionActionName
* @return date
*/
Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName);
/** /**
* Compute the "disposition as of" date (if necessary) for a disposition action and a node. * Compute the "disposition as of" date (if necessary) for a disposition action and a node.
@@ -244,4 +255,13 @@ public interface DispositionService
*/ */
Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition,
boolean allowContextFromAsOf); boolean allowContextFromAsOf);
/**
* Gets the origin disposition schedule for the record, not the calculated one
* in case of multiple dispositions applied to record
*
* @param nodeRef record
* @return the initial disposition
*/
DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef);
} }

View File

@@ -18,6 +18,8 @@
*/ */
package org.alfresco.module.org_alfresco_module_rm.disposition; package org.alfresco.module.org_alfresco_module_rm.disposition;
import static org.apache.commons.lang3.BooleanUtils.isNotTrue;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
@@ -27,6 +29,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry;
import org.alfresco.module.org_alfresco_module_rm.disposition.property.DispositionProperty; import org.alfresco.module.org_alfresco_module_rm.disposition.property.DispositionProperty;
@@ -38,6 +41,7 @@ import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.record.RecordService; import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService; import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl; import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
import org.alfresco.repo.dictionary.types.period.Immediately;
import org.alfresco.repo.policy.BehaviourFilter; import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.policy.annotation.Behaviour; import org.alfresco.repo.policy.annotation.Behaviour;
import org.alfresco.repo.policy.annotation.BehaviourBean; import org.alfresco.repo.policy.annotation.BehaviourBean;
@@ -70,15 +74,26 @@ public class DispositionServiceImpl extends ServiceBaseImpl
/** Logger */ /** Logger */
private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class); private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class);
/** Transaction mode for setting next action */
public enum WriteMode
{
/** Do not update any data. */
READ_ONLY,
/** Only set the "disposition as of" date. */
DATE_ONLY,
/**
* Set the "disposition as of" date and the name of the next action. This only happens during the creation of a
* disposition schedule impl node under a record or folder.
*/
DATE_AND_NAME
};
/** Behaviour filter */ /** Behaviour filter */
private BehaviourFilter behaviourFilter; private BehaviourFilter behaviourFilter;
/** Records management service registry */ /** Records management service registry */
private RecordsManagementServiceRegistry serviceRegistry; private RecordsManagementServiceRegistry serviceRegistry;
/** Disposition selection strategy */
private DispositionSelectionStrategy dispositionSelectionStrategy;
/** File plan service */ /** File plan service */
private FilePlanService filePlanService; private FilePlanService filePlanService;
@@ -168,16 +183,6 @@ public class DispositionServiceImpl extends ServiceBaseImpl
this.freezeService = freezeService; this.freezeService = freezeService;
} }
/**
* Set the dispositionSelectionStrategy bean.
*
* @param dispositionSelectionStrategy
*/
public void setDispositionSelectionStrategy(DispositionSelectionStrategy dispositionSelectionStrategy)
{
this.dispositionSelectionStrategy = dispositionSelectionStrategy;
}
/** /**
* Behavior to initialize the disposition schedule of a newly filed record. * Behavior to initialize the disposition schedule of a newly filed record.
* *
@@ -267,31 +272,76 @@ public class DispositionServiceImpl extends ServiceBaseImpl
* @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef) * @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef)
*/ */
@Override @Override
public DispositionSchedule getDispositionSchedule(NodeRef nodeRef) public DispositionSchedule getDispositionSchedule(final NodeRef nodeRef)
{ {
DispositionSchedule di = null; DispositionSchedule ds = null;
NodeRef diNodeRef = null; NodeRef dsNodeRef = null;
if (isRecord(nodeRef)) if (isRecord(nodeRef))
{ {
// Get the record folders for the record // calculate disposition schedule without taking into account the user
List<NodeRef> recordFolders = recordFolderService.getRecordFolders(nodeRef); DispositionSchedule originDispositionSchedule = AuthenticationUtil.runAsSystem(new RunAsWork<DispositionSchedule>()
// At this point, we may have disposition instruction objects from 1..n folders. {
diNodeRef = dispositionSelectionStrategy.selectDispositionScheduleFrom(recordFolders); @Override
public DispositionSchedule doWork()
{
return getOriginDispositionSchedule(nodeRef);
}
});
// if the initial disposition schedule of the record is folder based
if (originDispositionSchedule == null ||
isNotTrue(originDispositionSchedule.isRecordLevelDisposition()))
{
return null;
}
final NextActionFromDisposition dsNextAction = getDispositionActionByNameForRecord(nodeRef);
if (dsNextAction != null)
{
final NodeRef action = dsNextAction.getNextActionNodeRef();
if (isNotTrue((Boolean)nodeService.getProperty(action, PROP_MANUALLY_SET_AS_OF)))
{
if (!dsNextAction.getWriteMode().equals(WriteMode.READ_ONLY))
{
final String dispositionActionName = dsNextAction.getNextActionName();
final Date dispositionActionDate = dsNextAction.getNextActionDateAsOf();
AuthenticationUtil.runAsSystem(new RunAsWork<Void>()
{
@Override
public Void doWork()
{
nodeService.setProperty(action, PROP_DISPOSITION_AS_OF, dispositionActionDate);
if (dsNextAction.getWriteMode().equals(WriteMode.DATE_AND_NAME))
{
nodeService.setProperty(action, PROP_DISPOSITION_ACTION_NAME, dispositionActionName);
}
return null;
}
});
}
}
dsNodeRef = dsNextAction.getDispositionNodeRef();
}
} }
else else
{ {
// Get the disposition instructions for the node reference provided // Get the disposition instructions for the node reference provided
diNodeRef = getDispositionScheduleImpl(nodeRef); dsNodeRef = getDispositionScheduleImpl(nodeRef);
} }
if (diNodeRef != null) if (dsNodeRef != null)
{ {
di = new DispositionScheduleImpl(serviceRegistry, nodeService, diNodeRef); ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dsNodeRef);
} }
return di; return ds;
} }
/** /**
* This method returns a NodeRef * This method returns a NodeRef
* Gets the disposition instructions * Gets the disposition instructions
@@ -313,6 +363,28 @@ public class DispositionServiceImpl extends ServiceBaseImpl
} }
return result; return result;
} }
public DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef)
{
NodeRef parent = this.nodeService.getPrimaryParent(nodeRef).getParentRef();
if (parent != null)
{
if (filePlanService.isRecordCategory(parent))
{
NodeRef result = getAssociatedDispositionScheduleImpl(parent);
if (result == null)
{
return null;
}
return new DispositionScheduleImpl(serviceRegistry, nodeService, result);
}
else
{
return getOriginDispositionSchedule(parent);
}
}
return null;
}
/** /**
* @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getAssociatedDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef) * @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getAssociatedDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef)
@@ -620,8 +692,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl
* @param dispositionActionDefinition disposition action definition * @param dispositionActionDefinition disposition action definition
* @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property. * @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property.
*/ */
private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf) private DispositionAction initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf)
{ {
List<ChildAssociationRef> childAssocs = nodeService.getChildAssocs(nodeRef, ASSOC_NEXT_DISPOSITION_ACTION, ASSOC_NEXT_DISPOSITION_ACTION, 1, true);
if (childAssocs != null && childAssocs.size() > 0)
{
return new DispositionActionImpl(serviceRegistry, childAssocs.get(0).getChildRef());
}
// Create the properties // Create the properties
Map<QName, Serializable> props = new HashMap<QName, Serializable>(10); Map<QName, Serializable> props = new HashMap<QName, Serializable>(10);
@@ -651,6 +729,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
// For every event create an entry on the action // For every event create an entry on the action
da.addEventCompletionDetails(event); da.addEventCompletionDetails(event);
} }
return da;
} }
/** /**
@@ -683,9 +762,16 @@ public class DispositionServiceImpl extends ServiceBaseImpl
} }
else else
{ {
// for now use 'NOW' as the default context date if (period.getPeriodType().equals(Immediately.PERIOD_TYPE))
// TODO set the default period property ... cut off date or last disposition date depending on context {
contextDate = new Date(); contextDate = (Date)nodeService.getProperty(nodeRef, ContentModel.PROP_CREATED);
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
}
} }
// Calculate the as of date // Calculate the as of date
@@ -897,6 +983,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl
String currentADId = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION_ID); String currentADId = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION_ID);
currentDispositionActionDefinition = di.getDispositionActionDefinition(currentADId); currentDispositionActionDefinition = di.getDispositionActionDefinition(currentADId);
// When the record has multiple disposition schedules the current disposition action may not be found by id
// In this case it will be searched by name
if(currentDispositionActionDefinition == null)
{
String currentADName = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION);
currentDispositionActionDefinition = di.getDispositionActionDefinitionByName(currentADName);
}
// Get the next disposition action // Get the next disposition action
int index = currentDispositionActionDefinition.getIndex(); int index = currentDispositionActionDefinition.getIndex();
index++; index++;
@@ -983,6 +1077,24 @@ public class DispositionServiceImpl extends ServiceBaseImpl
} }
} }
public Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName)
{
DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dispositionSchedule);
List<ChildAssociationRef> assocs = nodeService.getChildAssocs(dispositionSchedule);
if (assocs != null && assocs.size() > 0)
{
for (ChildAssociationRef assoc : assocs)
{
if (assoc != null && assoc.getQName().getLocalName().contains(dispositionActionName))
{
DispositionActionDefinition actionDefinition = ds.getDispositionActionDefinition(assoc.getChildRef().getId());
return calculateAsOfDate(record, actionDefinition, true);
}
}
}
return null;
}
/** /**
* Helper method to determine if a node is frozen or has frozen children * Helper method to determine if a node is frozen or has frozen children
* *
@@ -1030,4 +1142,169 @@ public class DispositionServiceImpl extends ServiceBaseImpl
} }
}); });
} }
/**
* Calculate next disposition action for a record
*
* @param record
* @return next disposition action (name, date) and the disposition associated
*/
protected NextActionFromDisposition getDispositionActionByNameForRecord(NodeRef record)
{
List<NodeRef> recordFolders = recordFolderService.getRecordFolders(record);
DispositionAction nextDispositionAction = getNextDispositionAction(record);
if (nextDispositionAction == null)
{
DispositionAction lastCompletedDispositionAction = getLastCompletedDispostionAction(record);
if (lastCompletedDispositionAction != null)
{
// all disposition actions upon the given record were completed
return null;
}
return getFirstDispositionAction(record, recordFolders);
}
else
{
return getNextDispositionAction(record, recordFolders, nextDispositionAction);
}
}
/**
* Calculate next disposition action when the record already has one
* @param recordFolders
* @param nextDispositionAction
* @return next disposition action and the associated disposition schedule
*/
private NextActionFromDisposition getNextDispositionAction(NodeRef record, List<NodeRef> recordFolders, DispositionAction nextDispositionAction)
{
String recordNextDispositionActionName = nextDispositionAction.getName();
Date recordNextDispositionActionDate = nextDispositionAction.getAsOfDate();
// We're looking for the latest date, so initially start with a very early one.
Date nextDispositionActionDate = new Date(Long.MIN_VALUE);
NodeRef dispositionNodeRef = null;
// Find the latest "disposition as of" date from all the schedules this record is subject to.
for (NodeRef folder : recordFolders)
{
NodeRef dsNodeRef = getDispositionScheduleImpl(folder);
if (dsNodeRef != null)
{
Date dispActionDate = getDispositionActionDate(record, dsNodeRef, recordNextDispositionActionName);
if (dispActionDate == null || (nextDispositionActionDate != null
&& nextDispositionActionDate.before(dispActionDate)))
{
nextDispositionActionDate = dispActionDate;
dispositionNodeRef = dsNodeRef;
if (dispActionDate == null)
{
// Treat null as the latest date possible (so stop searching further).
break;
}
}
}
}
if (dispositionNodeRef == null)
{
return null;
}
WriteMode mode = determineWriteMode(recordNextDispositionActionDate, nextDispositionActionDate);
return new NextActionFromDisposition(dispositionNodeRef, nextDispositionAction.getNodeRef(),
recordNextDispositionActionName, nextDispositionActionDate, mode);
}
/**
* Determine what should be updated for an existing disposition schedule impl. We only update the date if the
* existing date is earlier than the calculated one.
*
* @param recordNextDispositionActionDate The next action date found on the record node (or folder node).
* @param nextDispositionActionDate The next action date calculated from the current disposition schedule(s)
* affecting the node.
* @return READ_ONLY if nothing should be updated, or DATE_ONLY if the date needs updating.
*/
private WriteMode determineWriteMode(Date recordNextDispositionActionDate, Date nextDispositionActionDate)
{
// Treat null dates as being the latest possible date.
Date maxDate = new Date(Long.MAX_VALUE);
Date recordDate = (recordNextDispositionActionDate != null ? recordNextDispositionActionDate : maxDate);
Date calculatedDate = (nextDispositionActionDate != null ? nextDispositionActionDate : maxDate);
// We only need to update the date if the current one is too early.
if (recordDate.before(calculatedDate))
{
return WriteMode.DATE_ONLY;
}
else
{
return WriteMode.READ_ONLY;
}
}
/**
* Calculate first disposition action when the record doesn't have one
* @param recordFolders
* @return next disposition action and the associated disposition schedule
*/
private NextActionFromDisposition getFirstDispositionAction(NodeRef record, List<NodeRef> recordFolders)
{
NodeRef newAction = null;
String newDispositionActionName = null;
// We're looking for the latest date, so start with a very early one.
Date newDispositionActionDateAsOf = new Date(Long.MIN_VALUE);
NodeRef dispositionNodeRef = null;
for (NodeRef folder : recordFolders)
{
NodeRef folderDS = getDispositionScheduleImpl(folder);
if (folderDS != null)
{
DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, folderDS);
List<DispositionActionDefinition> dispositionActionDefinitions = ds.getDispositionActionDefinitions();
if (dispositionActionDefinitions != null && dispositionActionDefinitions.size() > 0)
{
DispositionActionDefinition firstDispositionActionDef = dispositionActionDefinitions.get(0);
dispositionNodeRef = folderDS;
if (newAction == null)
{
NodeRef recordOrFolder = record;
if (!ds.isRecordLevelDisposition())
{
recordOrFolder = folder;
}
DispositionAction firstDispositionAction = initialiseDispositionAction(recordOrFolder, firstDispositionActionDef, true);
newAction = firstDispositionAction.getNodeRef();
newDispositionActionName = (String)nodeService.getProperty(newAction, PROP_DISPOSITION_ACTION_NAME);
newDispositionActionDateAsOf = firstDispositionAction.getAsOfDate();
}
else if (firstDispositionActionDef.getPeriod() != null)
{
Date firstActionDate = calculateAsOfDate(record, firstDispositionActionDef, true);
if (firstActionDate == null || (newDispositionActionDateAsOf != null
&& newDispositionActionDateAsOf.before(firstActionDate)))
{
newDispositionActionName = firstDispositionActionDef.getName();
newDispositionActionDateAsOf = firstActionDate;
if (firstActionDate == null)
{
// Treat null as the latest date possible, so there's no point searching further.
break;
}
}
}
}
}
}
if (newDispositionActionName == null || dispositionNodeRef == null || newAction == null)
{
return null;
}
return new NextActionFromDisposition(dispositionNodeRef, newAction,
newDispositionActionName, newDispositionActionDateAsOf, WriteMode.DATE_AND_NAME);
}
} }

View File

@@ -0,0 +1,80 @@
package org.alfresco.module.org_alfresco_module_rm.disposition;
import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionServiceImpl.WriteMode;
import org.alfresco.service.cmr.repository.NodeRef;
public class NextActionFromDisposition
{
public NextActionFromDisposition(NodeRef dispositionNodeRef, NodeRef nextActionNodeRef, String nextActionName, Date nextActionDateAsOf,
WriteMode writeMode)
{
super();
this.dispositionNodeRef = dispositionNodeRef;
this.nextActionNodeRef = nextActionNodeRef;
this.nextActionName = nextActionName;
this.nextActionDateAsOf = nextActionDateAsOf;
this.writeMode = writeMode;
}
private NodeRef dispositionNodeRef;
private NodeRef nextActionNodeRef;
private String nextActionName;
private Date nextActionDateAsOf;
private WriteMode writeMode;
public WriteMode getWriteMode()
{
return writeMode;
}
public void setWriteMode(WriteMode writeMode)
{
this.writeMode = writeMode;
}
public NodeRef getNextActionNodeRef()
{
return nextActionNodeRef;
}
public void setNextActionNodeRef(NodeRef nextActionNodeRef)
{
this.nextActionNodeRef = nextActionNodeRef;
}
public NodeRef getDispositionNodeRef()
{
return dispositionNodeRef;
}
public void setDispositionNodeRef(NodeRef dispositionNodeRef)
{
this.dispositionNodeRef = dispositionNodeRef;
}
public String getNextActionName()
{
return nextActionName;
}
public void setNextActionName(String nextActionName)
{
this.nextActionName = nextActionName;
}
public Date getNextActionDateAsOf()
{
return nextActionDateAsOf;
}
public void setNextActionDateAsOf(Date nextActionDateAsOf)
{
this.nextActionDateAsOf = nextActionDateAsOf;
}
}

View File

@@ -18,6 +18,8 @@
*/ */
package org.alfresco.module.org_alfresco_module_rm.disposition.property; package org.alfresco.module.org_alfresco_module_rm.disposition.property;
import static org.apache.commons.lang3.BooleanUtils.isNotTrue;
import java.io.Serializable; import java.io.Serializable;
import java.util.Date; import java.util.Date;
import java.util.Map; import java.util.Map;
@@ -211,7 +213,11 @@ public class DispositionProperty extends BaseBehaviourBean
// update asOf date on the disposition action based on the new property value // update asOf date on the disposition action based on the new property value
NodeRef daNodeRef = dispositionAction.getNodeRef(); NodeRef daNodeRef = dispositionAction.getNodeRef();
nodeService.setProperty(daNodeRef, PROP_DISPOSITION_AS_OF, updatedAsOf); // Don't overwrite a manually set "disposition as of" date.
if (isNotTrue((Boolean) nodeService.getProperty(daNodeRef, PROP_MANUALLY_SET_AS_OF)))
{
nodeService.setProperty(daNodeRef, PROP_DISPOSITION_AS_OF, updatedAsOf);
}
} }
} }
} }

View File

@@ -149,6 +149,8 @@ public interface RecordsManagementModel extends RecordsManagementCustomModel
QName PROP_DISPOSITION_ACTION_ID = QName.createQName(RM_URI, "dispositionActionId"); QName PROP_DISPOSITION_ACTION_ID = QName.createQName(RM_URI, "dispositionActionId");
QName PROP_DISPOSITION_ACTION = QName.createQName(RM_URI, "dispositionAction"); QName PROP_DISPOSITION_ACTION = QName.createQName(RM_URI, "dispositionAction");
QName PROP_DISPOSITION_AS_OF = QName.createQName(RM_URI, "dispositionAsOf"); QName PROP_DISPOSITION_AS_OF = QName.createQName(RM_URI, "dispositionAsOf");
/** A flag indicating that the "disposition as of" date has been manually set and shouldn't be changed. */
QName PROP_MANUALLY_SET_AS_OF = QName.createQName(RM_URI, "manuallySetAsOf");
QName PROP_DISPOSITION_EVENTS_ELIGIBLE = QName.createQName(RM_URI, "dispositionEventsEligible"); QName PROP_DISPOSITION_EVENTS_ELIGIBLE = QName.createQName(RM_URI, "dispositionEventsEligible");
QName PROP_DISPOSITION_ACTION_STARTED_AT = QName.createQName(RM_URI, "dispositionActionStartedAt"); QName PROP_DISPOSITION_ACTION_STARTED_AT = QName.createQName(RM_URI, "dispositionActionStartedAt");
QName PROP_DISPOSITION_ACTION_STARTED_BY = QName.createQName(RM_URI, "dispositionActionStartedBy"); QName PROP_DISPOSITION_ACTION_STARTED_BY = QName.createQName(RM_URI, "dispositionActionStartedBy");

View File

@@ -34,12 +34,14 @@ import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel; import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.BeforeFileRecord; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.BeforeFileRecord;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.OnFileRecord; import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.OnFileRecord;
import org.alfresco.module.org_alfresco_module_rm.capability.Capability; import org.alfresco.module.org_alfresco_module_rm.capability.Capability;
import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService; import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel; import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionScheduleImpl;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService; import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.module.org_alfresco_module_rm.dod5015.DOD5015Model; import org.alfresco.module.org_alfresco_module_rm.dod5015.DOD5015Model;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService; import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -228,7 +230,7 @@ public class RecordServiceImpl extends BaseBehaviourBean
/** recordable version service */ /** recordable version service */
private RecordableVersionService recordableVersionService; private RecordableVersionService recordableVersionService;
/** list of available record meta-data aspects and the file plan types the are applicable to */ /** list of available record meta-data aspects and the file plan types the are applicable to */
private Map<QName, Set<QName>> recordMetaDataAspects; private Map<QName, Set<QName>> recordMetaDataAspects;
@@ -385,7 +387,7 @@ public class RecordServiceImpl extends BaseBehaviourBean
{ {
this.recordableVersionService = recordableVersionService; this.recordableVersionService = recordableVersionService;
} }
/** /**
* Init method * Init method
*/ */
@@ -1732,7 +1734,10 @@ public class RecordServiceImpl extends BaseBehaviourBean
private void validateLinkConditions(NodeRef record, NodeRef recordFolder) private void validateLinkConditions(NodeRef record, NodeRef recordFolder)
{ {
// ensure that the linking record folders have compatible disposition schedules // ensure that the linking record folders have compatible disposition schedules
DispositionSchedule recordDispositionSchedule = dispositionService.getDispositionSchedule(record);
// get the origin disposition schedule for the record, not the calculated one
DispositionSchedule recordDispositionSchedule = dispositionService.getOriginDispositionSchedule(record);
if (recordDispositionSchedule != null) if (recordDispositionSchedule != null)
{ {
DispositionSchedule recordFolderDispositionSchedule = dispositionService.getDispositionSchedule(recordFolder); DispositionSchedule recordFolderDispositionSchedule = dispositionService.getDispositionSchedule(recordFolder);

View File

@@ -21,8 +21,10 @@ package org.alfresco.repo.security.permissions.impl;
import static org.apache.commons.lang.StringUtils.isNotBlank; import static org.apache.commons.lang.StringUtils.isNotBlank;
import java.io.Serializable; import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List;
import java.util.Set; import java.util.Set;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel; import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
@@ -30,6 +32,7 @@ import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService; import org.alfresco.module.org_alfresco_module_rm.role.FilePlanRoleService;
import org.alfresco.repo.cache.SimpleCache; import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.security.permissions.AccessControlEntry; import org.alfresco.repo.security.permissions.AccessControlEntry;
import org.alfresco.repo.security.permissions.AccessControlList; import org.alfresco.repo.security.permissions.AccessControlList;
import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeRef;
@@ -42,6 +45,7 @@ import org.alfresco.util.PropertyCheck;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEvent;
/** /**
* Extends the core permission service implementation allowing the consideration of the read records * Extends the core permission service implementation allowing the consideration of the read records
* permission. * permission.
@@ -56,6 +60,16 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl
/** Writers simple cache */ /** Writers simple cache */
protected SimpleCache<Serializable, Set<String>> writersCache; protected SimpleCache<Serializable, Set<String>> writersCache;
/**
* Configured Permission mapping.
*
* These strings come from alfresco-global.properties and allow fine tuning of the how permissions are mapped.
* This was added as a fix for MNT-16852 to enhance compatibility with our Outlook Integration.
*
**/
protected List<String> configuredReadPermissions;
protected List<String> configuredFilePermissions;
/** File plan service */ /** File plan service */
private FilePlanService filePlanService; private FilePlanService filePlanService;
@@ -97,6 +111,28 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl
this.writersCache = writersCache; this.writersCache = writersCache;
} }
/**
* Maps the string from the properties file (rm.haspermissionmap.read)
* to the list used in the hasPermission method
*
* @param readMapping the mapping of permissions to ReadRecord
*/
public void setConfiguredReadPermissions(String readMapping)
{
this.configuredReadPermissions = Arrays.asList(readMapping.split(","));
}
/**
* Maps the string set in the properties file (rm.haspermissionmap.write)
* to the list used in the hasPermission method
*
* @param fileMapping the mapping of permissions to FileRecord
*/
public void setConfiguredFilePermissions(String fileMapping)
{
this.configuredFilePermissions = Arrays.asList(fileMapping.split(","));
}
/** /**
* @see org.alfresco.repo.security.permissions.impl.PermissionServiceImpl#onBootstrap(org.springframework.context.ApplicationEvent) * @see org.alfresco.repo.security.permissions.impl.PermissionServiceImpl#onBootstrap(org.springframework.context.ApplicationEvent)
*/ */
@@ -118,18 +154,18 @@ public class RMPermissionServiceImpl extends PermissionServiceImpl
public AccessStatus hasPermission(NodeRef nodeRef, String perm) public AccessStatus hasPermission(NodeRef nodeRef, String perm)
{ {
AccessStatus acs = super.hasPermission(nodeRef, perm); AccessStatus acs = super.hasPermission(nodeRef, perm);
if (AccessStatus.DENIED.equals(acs) && if (AccessStatus.DENIED.equals(acs) &&
PermissionService.READ.equals(perm) &&
nodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT)) nodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT))
{ {
return super.hasPermission(nodeRef, RMPermissionModel.READ_RECORDS); if (PermissionService.READ.equals(perm) || this.configuredReadPermissions.contains(perm))
} {
// Added ADD_CHILDREN check in for MNT-16852. return super.hasPermission(nodeRef, RMPermissionModel.READ_RECORDS);
else if (AccessStatus.DENIED.equals(acs) && }
(PermissionService.WRITE.equals(perm) || PermissionService.ADD_CHILDREN.equals(perm)) && else if (PermissionService.WRITE.equals(perm) || this.configuredFilePermissions.contains(perm))
nodeService.hasAspect(nodeRef, RecordsManagementModel.ASPECT_FILE_PLAN_COMPONENT)) {
{ return super.hasPermission(nodeRef, RMPermissionModel.FILE_RECORDS);
return super.hasPermission(nodeRef, RMPermissionModel.FILE_RECORDS); }
} }
return acs; return acs;

View File

@@ -18,22 +18,22 @@
*/ */
package org.alfresco.repo.web.scripts.roles; package org.alfresco.repo.web.scripts.roles;
import java.io.BufferedWriter; import java.io.BufferedWriter;
import java.io.File; import java.io.File;
import java.io.FileWriter; import java.io.FileWriter;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.io.StringWriter; import java.io.StringWriter;
import java.io.Writer; import java.io.Writer;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponse;
import org.alfresco.model.ContentModel; import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel; import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority; import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority;
@@ -43,26 +43,26 @@ import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer; import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName; import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService; import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair; import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.AbstractWebScript; import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Cache; import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Format; import org.springframework.extensions.webscripts.Format;
import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException; import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest; import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse; import org.springframework.extensions.webscripts.WebScriptResponse;
/** /**
* Webscript used for removing dynamic authorities from the records. * Webscript used for removing dynamic authorities from the records.
@@ -71,7 +71,7 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
* @since 2.3.0.7 * @since 2.3.0.7
*/ */
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel
{ {
private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0."; private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0.";
private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN"; private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN";
@@ -80,7 +80,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN"; private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN";
private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid."; private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid.";
private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory"; private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory";
private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist."; private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist.";
private static final String SUCCESS_STATUS = "success"; private static final String SUCCESS_STATUS = "success";
/** /**
* The logger * The logger
@@ -88,8 +88,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class); private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class);
private static final String BATCH_SIZE = "batchsize"; private static final String BATCH_SIZE = "batchsize";
private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords"; private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords";
private static final String PARAM_EXPORT = "export"; private static final String PARAM_EXPORT = "export";
private static final String PARAM_PARENT_NODE_REF = "parentNodeRef"; private static final String PARAM_PARENT_NODE_REF = "parentNodeRef";
private static final String MODEL_STATUS = "responsestatus"; private static final String MODEL_STATUS = "responsestatus";
private static final String MODEL_MESSAGE = "message"; private static final String MODEL_MESSAGE = "message";
private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records."; private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records.";
@@ -105,60 +105,60 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private PermissionService permissionService; private PermissionService permissionService;
private ExtendedSecurityService extendedSecurityService; private ExtendedSecurityService extendedSecurityService;
private TransactionService transactionService; private TransactionService transactionService;
/** Content Streamer */ /** Content Streamer */
protected ContentStreamer contentStreamer; protected ContentStreamer contentStreamer;
private FileFolderService fileFolderService; private FileFolderService fileFolderService;
/** service setters */
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
public void setNodeDAO(NodeDAO nodeDAO) /** service setters */
{ public void setPatchDAO(PatchDAO patchDAO)
this.nodeDAO = nodeDAO; {
} this.patchDAO = patchDAO;
}
public void setQnameDAO(QNameDAO qnameDAO) public void setNodeDAO(NodeDAO nodeDAO)
{ {
this.qnameDAO = qnameDAO; this.nodeDAO = nodeDAO;
} }
public void setNodeService(NodeService nodeService) public void setQnameDAO(QNameDAO qnameDAO)
{ {
this.nodeService = nodeService; this.qnameDAO = qnameDAO;
} }
public void setPermissionService(PermissionService permissionService) public void setNodeService(NodeService nodeService)
{ {
this.permissionService = permissionService; this.nodeService = nodeService;
} }
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService) public void setPermissionService(PermissionService permissionService)
{ {
this.extendedSecurityService = extendedSecurityService; this.permissionService = permissionService;
} }
public void setTransactionService(TransactionService transactionService) public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService)
{
this.transactionService = transactionService;
}
public void setContentStreamer(ContentStreamer contentStreamer)
{ {
this.contentStreamer = contentStreamer; this.extendedSecurityService = extendedSecurityService;
} }
public void setFileFolderService(FileFolderService fileFolderService) public void setTransactionService(TransactionService transactionService)
{ {
this.fileFolderService = fileFolderService; this.transactionService = transactionService;
} }
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException public void setContentStreamer(ContentStreamer contentStreamer)
{ {
Map<String, Object> model = new HashMap<String, Object>(); this.contentStreamer = contentStreamer;
final Long batchSize = getBatchSizeParameter(req); }
public void setFileFolderService(FileFolderService fileFolderService)
{
this.fileFolderService = fileFolderService;
}
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException
{
Map<String, Object> model = new HashMap<String, Object>();
final Long batchSize = getBatchSizeParameter(req);
// get the max node id and the extended security aspect // get the max node id and the extended security aspect
Long maxNodeId = patchDAO.getMaxAdmNodeID(); Long maxNodeId = patchDAO.getMaxAdmNodeID();
final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY); final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY);
@@ -170,201 +170,201 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
return model; return model;
} }
Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize); Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize);
boolean attach = getExportParameter(req); boolean attach = getExportParameter(req);
File file = TempFileProvider.createTempFile("processedNodes_", ".csv"); File file = TempFileProvider.createTempFile("processedNodes_", ".csv");
FileWriter writer = new FileWriter(file); FileWriter writer = new FileWriter(file);
BufferedWriter out = new BufferedWriter(writer); BufferedWriter out = new BufferedWriter(writer);
List<NodeRef> processedNodes = new ArrayList<NodeRef>(); List<NodeRef> processedNodes = new ArrayList<NodeRef>();
try try
{ {
NodeRef parentNodeRef = getParentNodeRefParameter(req); NodeRef parentNodeRef = getParentNodeRefParameter(req);
if (parentNodeRef != null) if (parentNodeRef != null)
{ {
processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair, processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair,
totalNumberOfRecordsToProcess.intValue(), out, attach); totalNumberOfRecordsToProcess.intValue(), out, attach);
} }
else else
{ {
processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess, processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess,
out, attach); out, attach);
} }
} }
finally finally
{ {
out.close(); out.close();
} }
int processedNodesSize = processedNodes.size(); int processedNodesSize = processedNodes.size();
String message = ""; String message = "";
if (totalNumberOfRecordsToProcess == 0 if (totalNumberOfRecordsToProcess == 0
|| (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess)) || (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess))
{ {
message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize); message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize);
} }
if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize) if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize)
{ {
message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess); message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess);
} }
model.put(MODEL_STATUS, SUCCESS_STATUS); model.put(MODEL_STATUS, SUCCESS_STATUS);
model.put(MODEL_MESSAGE, message); model.put(MODEL_MESSAGE, message);
logger.info(message); logger.info(message);
if (attach) if (attach)
{ {
try try
{ {
String fileName = file.getName(); String fileName = file.getName();
contentStreamer.streamContent(req, res, file, null, attach, fileName, model); contentStreamer.streamContent(req, res, file, null, attach, fileName, model);
model = null; model = null;
} }
finally finally
{ {
if (file != null) if (file != null)
{ {
file.delete(); file.delete();
} }
} }
} }
return model; return model;
} }
/** /**
* Get export parameter from the request * Get export parameter from the request
* *
* @param req * @param req
* @return * @return
*/ */
protected boolean getExportParameter(WebScriptRequest req) protected boolean getExportParameter(WebScriptRequest req)
{ {
boolean attach = false; boolean attach = false;
String export = req.getParameter(PARAM_EXPORT); String export = req.getParameter(PARAM_EXPORT);
if (export != null && Boolean.parseBoolean(export)) if (export != null && Boolean.parseBoolean(export))
{ {
attach = true; attach = true;
} }
return attach; return attach;
} }
/* /*
* (non-Javadoc) * (non-Javadoc)
* @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts. * @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts.
* WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse) * WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse)
*/ */
@Override @Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException
{ {
// retrieve requested format // retrieve requested format
String format = req.getFormat(); String format = req.getFormat();
try try
{ {
String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format); String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format);
if (mimetype == null) if (mimetype == null)
{ {
throw new WebScriptException("Web Script format '" + format + "' is not registered"); throw new WebScriptException("Web Script format '" + format + "' is not registered");
} }
// construct model for script / template // construct model for script / template
Status status = new Status(); Status status = new Status();
Cache cache = new Cache(getDescription().getRequiredCache()); Cache cache = new Cache(getDescription().getRequiredCache());
Map<String, Object> model = buildModel(req, res); Map<String, Object> model = buildModel(req, res);
if (model == null) { return; } if (model == null) { return; }
model.put("status", status); model.put("status", status);
model.put("cache", cache); model.put("cache", cache);
Map<String, Object> templateModel = createTemplateParameters(req, res, model); Map<String, Object> templateModel = createTemplateParameters(req, res, model);
// render output // render output
int statusCode = status.getCode(); int statusCode = status.getCode();
if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus()) if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus())
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
{ {
logger.debug("Force success status header in response: " + req.forceSuccessStatus()); logger.debug("Force success status header in response: " + req.forceSuccessStatus());
logger.debug("Setting status " + statusCode); logger.debug("Setting status " + statusCode);
} }
res.setStatus(statusCode); res.setStatus(statusCode);
} }
// apply location // apply location
String location = status.getLocation(); String location = status.getLocation();
if (location != null && location.length() > 0) if (location != null && location.length() > 0)
{ {
if (logger.isDebugEnabled()) logger.debug("Setting location to " + location); if (logger.isDebugEnabled()) logger.debug("Setting location to " + location);
res.setHeader(WebScriptResponse.HEADER_LOCATION, location); res.setHeader(WebScriptResponse.HEADER_LOCATION, location);
} }
// apply cache // apply cache
res.setCache(cache); res.setCache(cache);
String callback = null; String callback = null;
if (getContainer().allowCallbacks()) if (getContainer().allowCallbacks())
{ {
callback = req.getJSONCallback(); callback = req.getJSONCallback();
} }
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{ {
if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type=" if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type="
+ Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback); + Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback);
// NOTE: special case for wrapping JSON results in a javascript function callback // NOTE: special case for wrapping JSON results in a javascript function callback
res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8"); res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8");
res.getWriter().write((callback + "(")); res.getWriter().write((callback + "("));
} }
else else
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode); logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode);
res.setContentType(mimetype + ";charset=UTF-8"); res.setContentType(mimetype + ";charset=UTF-8");
} }
// render response according to requested format // render response according to requested format
renderFormatTemplate(format, templateModel, res.getWriter()); renderFormatTemplate(format, templateModel, res.getWriter());
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null) if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{ {
// NOTE: special case for wrapping JSON results in a javascript function callback // NOTE: special case for wrapping JSON results in a javascript function callback
res.getWriter().write(")"); res.getWriter().write(")");
} }
} }
catch (Throwable e) catch (Throwable e)
{ {
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
{ {
StringWriter stack = new StringWriter(); StringWriter stack = new StringWriter();
e.printStackTrace(new PrintWriter(stack)); e.printStackTrace(new PrintWriter(stack));
logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString()); logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString());
} }
throw createStatusException(e, req, res); throw createStatusException(e, req, res);
} }
} }
protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer) protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer)
{ {
format = (format == null) ? "" : format; format = (format == null) ? "" : format;
String templatePath = getDescription().getId() + "." + format; String templatePath = getDescription().getId() + "." + format;
if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'"); if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'");
renderTemplate(templatePath, model, writer); renderTemplate(templatePath, model, writer);
} }
/** /**
* Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise * Obtain maximum of the records to be processed from the request if it is specified or bachsize value otherwise
* *
* @param req * @param req
* @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise * @return maximum of the records to be processed from the request if it is specified or bachsize value otherwise
*/ */
protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize) protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize)
{ {
String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS); String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS);
//default total number of records to be processed to batch size value //default total number of records to be processed to batch size value
Long totalNumberOfRecordsToProcess = batchSize; Long totalNumberOfRecordsToProcess = batchSize;
if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr)) if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr))
@@ -378,77 +378,77 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
//do nothing here, the value will remain 0L in this case //do nothing here, the value will remain 0L in this case
} }
} }
return totalNumberOfRecordsToProcess; return totalNumberOfRecordsToProcess;
} }
/**
* Obtain batchsize parameter from the request.
*
* @param req
* @return batchsize parameter from the request
*/
protected Long getBatchSizeParameter(WebScriptRequest req)
{
String batchSizeStr = req.getParameter(BATCH_SIZE);
Long size = 0L;
if (StringUtils.isBlank(batchSizeStr))
{
logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
}
try
{
size = Long.parseLong(batchSizeStr);
if (size <= 0)
{
logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
}
catch (NumberFormatException ex)
{
logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
}
return size;
}
/** /**
* Get parentNodeRef parameter from the request * Obtain batchsize parameter from the request.
* *
* @param req * @param req
* @return * @return batchsize parameter from the request
*/ */
protected NodeRef getParentNodeRefParameter(WebScriptRequest req) protected Long getBatchSizeParameter(WebScriptRequest req)
{ {
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF); String batchSizeStr = req.getParameter(BATCH_SIZE);
NodeRef parentNodeRef = null; Long size = 0L;
if (StringUtils.isNotBlank(parentNodeRefStr)) if (StringUtils.isBlank(batchSizeStr))
{ {
parentNodeRef = new NodeRef(parentNodeRefStr); logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
if(!nodeService.exists(parentNodeRef)) throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
{ }
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString()); try
logger.info(message); {
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message); size = Long.parseLong(batchSizeStr);
} if (size <= 0)
} {
return parentNodeRef; logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
} throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
/** }
* Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess catch (NumberFormatException ex)
* parameters {
* logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
* @param batchSize throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
* @param maxNodeId }
* @param recordAspectPair return size;
* @param totalNumberOfRecordsToProcess }
* @return the list of processed nodes
*/ /**
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair, * Get parentNodeRef parameter from the request
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach) *
{ * @param req
* @return
*/
protected NodeRef getParentNodeRefParameter(WebScriptRequest req)
{
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF);
NodeRef parentNodeRef = null;
if (StringUtils.isNotBlank(parentNodeRefStr))
{
parentNodeRef = new NodeRef(parentNodeRefStr);
if(!nodeService.exists(parentNodeRef))
{
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString());
logger.info(message);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message);
}
}
return parentNodeRef;
}
/**
* Process nodes all nodes or the maximum number of nodes specified by batchsize or totalNumberOfRecordsToProcess
* parameters
*
* @param batchSize
* @param maxNodeId
* @param recordAspectPair
* @param totalNumberOfRecordsToProcess
* @return the list of processed nodes
*/
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair,
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach)
{
final Long maxRecordsToProcess = totalNumberOfRecordsToProcess; final Long maxRecordsToProcess = totalNumberOfRecordsToProcess;
final List<NodeRef> processedNodes = new ArrayList<NodeRef>(); final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
logger.info(MESSAGE_PROCESSING_BEGIN); logger.info(MESSAGE_PROCESSING_BEGIN);
@@ -466,8 +466,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
public Void execute() throws Throwable public Void execute() throws Throwable
{ {
// get the nodes with the extended security aspect applied // get the nodes with the extended security aspect applied
List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex, List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex,
currentIndex + batchSize); currentIndex + batchSize);
// process each one // process each one
for (Long nodeId : nodeIds) for (Long nodeId : nodeIds)
@@ -482,79 +482,79 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
processNode(record); processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName)); logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record); processedNodes.add(record);
if (attach) if (attach)
{ {
out.write(recordName); out.write(recordName);
out.write(","); out.write(",");
out.write(record.toString()); out.write(record.toString());
out.write("\n"); out.write("\n");
} }
} }
return null; return null;
} }
}, false, // read only }, false, // read only
true); // requires new true); // requires new
} }
logger.info(MESSAGE_PROCESSING_END); logger.info(MESSAGE_PROCESSING_END);
return processedNodes; return processedNodes;
}
    /**
     * Re-processes, in transactional batches, every node beneath the given parent
     * that carries the supplied record aspect.
     *
     * @param parentNodeRef       root node whose descendants are searched
     * @param batchSize           number of candidate nodes handled per transaction
     * @param recordAspectPair    pair of (aspect QName DB id, aspect QName); only nodes
     *                            with this aspect are processed
     * @param maxRecordsToProcess upper bound on processed records; 0 means "no limit"
     * @param out                 writer that receives one CSV row per processed record
     *                            when {@code attach} is true
     * @param attach              whether to emit the CSV report rows to {@code out}
     * @return the list of record node references that were actually processed
     */
    protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
                final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
                final boolean attach)
    {
        final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
        // NOTE(review): this loads the entire subtree into memory up front; confirm this
        // is acceptable for the expected hierarchy sizes.
        final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
        logger.info(MESSAGE_PROCESSING_BEGIN);
        // Walk the result list in slices of batchSize, one transaction per slice.
        for (int i = 0; i < children.size(); i += batchSize)
        {
            // Stop scheduling new batches once the cap has been reached (0 = unlimited).
            if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
            {
                break;
            }
            final int currentIndex = i;
            transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
            {
                public Void execute() throws Throwable
                {
                    List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
                    // process each one
                    for (FileInfo node : nodes)
                    {
                        // Re-check the cap inside the batch so it is never overshot mid-slice.
                        if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
                        {
                            break;
                        }
                        NodeRef record = node.getNodeRef();
                        // Only nodes carrying the record aspect are of interest.
                        if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
                        {
                            String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
                            logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
                            processNode(record);
                            logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
                            processedNodes.add(record);
                            if (attach)
                            {
                                // NOTE(review): these writes happen inside a retrying transaction;
                                // if the transaction retries, rows may be emitted twice — confirm.
                                out.write(recordName);
                                out.write(",");
                                out.write(record.toString());
                                out.write("\n");
                            }
                        }
                    }
                    return null;
                }
            }, false, // read only
                true); // requires new
        }
        logger.info(MESSAGE_PROCESSING_END);
        return processedNodes;
    }
    /**
     * Re-processes, in transactional batches, every node beneath the given parent
     * that carries the supplied record aspect.
     *
     * @param parentNodeRef       root node whose descendants are searched
     * @param batchSize           number of candidate nodes handled per transaction
     * @param recordAspectPair    pair of (aspect QName DB id, aspect QName); only nodes
     *                            with this aspect are processed
     * @param maxRecordsToProcess upper bound on processed records; 0 means "no limit"
     * @param out                 writer that receives one CSV row per processed record
     *                            when {@code attach} is true
     * @param attach              whether to emit the CSV report rows to {@code out}
     * @return the list of record node references that were actually processed
     */
    protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
                final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
                final boolean attach)
    {
        final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
        // NOTE(review): this loads the entire subtree into memory up front; confirm this
        // is acceptable for the expected hierarchy sizes.
        final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
        logger.info(MESSAGE_PROCESSING_BEGIN);
        // Walk the result list in slices of batchSize, one transaction per slice.
        for (int i = 0; i < children.size(); i += batchSize)
        {
            // Stop scheduling new batches once the cap has been reached (0 = unlimited).
            if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
            {
                break;
            }
            final int currentIndex = i;
            transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
            {
                public Void execute() throws Throwable
                {
                    List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
                    // process each one
                    for (FileInfo node : nodes)
                    {
                        // Re-check the cap inside the batch so it is never overshot mid-slice.
                        if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
                        {
                            break;
                        }
                        NodeRef record = node.getNodeRef();
                        // Only nodes carrying the record aspect are of interest.
                        if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
                        {
                            String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
                            logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
                            processNode(record);
                            logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
                            processedNodes.add(record);
                            if (attach)
                            {
                                // NOTE(review): these writes happen inside a retrying transaction;
                                // if the transaction retries, rows may be emitted twice — confirm.
                                out.write(recordName);
                                out.write(",");
                                out.write(record.toString());
                                out.write("\n");
                            }
                        }
                    }
                    return null;
                }
            }, false, // read only
                true); // requires new
        }
        logger.info(MESSAGE_PROCESSING_END);
        return processedNodes;
    }
/** /**
* Process each node * Process each node
* *
@@ -575,20 +575,20 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER); permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER);
// if record then ... // if record then ...
if (nodeService.hasAspect(nodeRef, ASPECT_RECORD)) if (nodeService.hasAspect(nodeRef, ASPECT_RECORD))
{
Set<String> readersKeySet = null;
if (readers != null)
{ {
readersKeySet = readers.keySet(); Set<String> readersKeySet = null;
} if (readers != null)
Set<String> writersKeySet = null; {
if (writers != null) readersKeySet = readers.keySet();
{ }
writersKeySet = writers.keySet(); Set<String> writersKeySet = null;
} if (writers != null)
{
writersKeySet = writers.keySet();
}
// re-set extended security via API // re-set extended security via API
extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet); extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet);
} }
} }
} }

View File

@@ -32,7 +32,9 @@ import org.junit.runners.Suite.SuiteClasses;
@SuiteClasses( @SuiteClasses(
{ {
CutOffTest.class, CutOffTest.class,
UpdateDispositionScheduleTest.class MultipleSchedulesTest.class,
UpdateDispositionScheduleTest.class,
UpdateNextDispositionActionTest.class
}) })
public class DispositionTestSuite public class DispositionTestSuite
{ {

View File

@@ -0,0 +1,176 @@
package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition;
import static org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest.test;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils;
import org.alfresco.module.org_alfresco_module_rm.test.util.bdt.BehaviourTest;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.extensions.webscripts.GUID;
import com.google.common.collect.ImmutableMap;
/**
 * Integration tests for a record subject to more than one disposition schedule
 * (filed in one category and linked into another). Verifies that the "as of"
 * date of the next disposition step is computed from the longest applicable
 * retention period (RM-2526).
 */
public class MultipleSchedulesTest extends BaseRMTestCase
{
    /** A unique prefix for the constants in this test. */
    protected static final String TEST_PREFIX = MultipleSchedulesTest.class.getName() + GUID.generate() + "_";
    /** The name to use for the first category. */
    protected static final String CATEGORY_A_NAME = TEST_PREFIX + "CategoryA";
    /** The name to use for the folder within the first category. */
    protected static final String FOLDER_A_NAME = TEST_PREFIX + "FolderA";
    /** The name to use for the second category. */
    protected static final String CATEGORY_B_NAME = TEST_PREFIX + "CategoryB";
    /** The name to use for the folder within the second category. */
    protected static final String FOLDER_B_NAME = TEST_PREFIX + "FolderB";
    /** The name to use for the record. */
    protected static final String RECORD_NAME = TEST_PREFIX + "Record";

    /** The internal disposition service is used to avoid permissions issues when updating the record. */
    private DispositionService internalDispositionService;

    /** The first category node. */
    private NodeRef categoryA;
    /** The folder node within the first category. */
    private NodeRef folderA;
    /** The second category node. */
    private NodeRef categoryB;
    /** The folder node within the second category. */
    private NodeRef folderB;
    /** The record node. */
    private NodeRef record;

    /**
     * Initialises the BDD test helper and looks up the internal (unproxied)
     * disposition service from the application context.
     */
    @Override
    protected void setUp() throws Exception
    {
        super.setUp();
        BehaviourTest.initBehaviourTests(retryingTransactionHelper);
        // Get the application context
        applicationContext = ApplicationContextHelper.getApplicationContext(getConfigLocations());
        internalDispositionService = (DispositionService) applicationContext.getBean("dispositionService");
        // Ensure different records are used for each test.
        record = null;
    }

    /**
     * Create two categories each containing a folder. Set up a schedule on category A that applies to records (cutoff
     * immediately, destroy immediately). Set up a schedule on category B that is the same, but with a week delay before
     * destroy becomes eligible.
     */
    private void setUpFilePlan()
    {
        // Only set up the file plan if it hasn't already been done.
        if (categoryA != null)
        {
            return;
        }
        // Create two categories.
        categoryA = filePlanService.createRecordCategory(filePlan, CATEGORY_A_NAME);
        categoryB = filePlanService.createRecordCategory(filePlan, CATEGORY_B_NAME);
        // Create a disposition schedule for category A (Cut off immediately, then Destroy immediately).
        DispositionSchedule dispSchedA = utils.createBasicDispositionSchedule(categoryA, "instructions", "authority", true, false);
        Map<QName, Serializable> cutOffParamsA = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME,
                    PROP_DISPOSITION_DESCRIPTION, "description",
                    PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY);
        dispositionService.addDispositionActionDefinition(dispSchedA, cutOffParamsA);
        Map<QName, Serializable> destroyParamsA = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME,
                    PROP_DISPOSITION_DESCRIPTION, "description",
                    PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY);
        dispositionService.addDispositionActionDefinition(dispSchedA, destroyParamsA);
        // Create a disposition schedule for category B (Cut off immediately, then Destroy one week after cutoff).
        DispositionSchedule dispSchedB = utils.createBasicDispositionSchedule(categoryB, "instructions", "authority", true, false);
        Map<QName, Serializable> cutOffParamsB = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME,
                    PROP_DISPOSITION_DESCRIPTION, "description",
                    PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY);
        dispositionService.addDispositionActionDefinition(dispSchedB, cutOffParamsB);
        Map<QName, Serializable> destroyParamsB = ImmutableMap.of(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME,
                    PROP_DISPOSITION_DESCRIPTION, "description",
                    PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_ONE_WEEK,
                    PROP_DISPOSITION_PERIOD_PROPERTY, PROP_CUT_OFF_DATE);
        dispositionService.addDispositionActionDefinition(dispSchedB, destroyParamsB);
        // Create a folder within each category.
        folderA = recordFolderService.createRecordFolder(categoryA, FOLDER_A_NAME);
        folderB = recordFolderService.createRecordFolder(categoryB, FOLDER_B_NAME);
    }

    /**
     * <a href="https://issues.alfresco.com/jira/browse/RM-2526">RM-2526</a>
     * <p><pre>
     * Given a record subject to a disposition schedule
     * And it is linked to a disposition schedule with the same step order, but a longer destroy step
     * When the record is moved onto the destroy step
     * Then the "as of" date is calculated using the longer period.
     * </pre>
     */
    public void testLinkedToLongerSchedule()
    {
        Calendar calendar = Calendar.getInstance();
        test()
            .given(() -> {
                setUpFilePlan();
                // Create a record filed under category A and linked to category B.
                record = fileFolderService.create(folderA, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef();
                recordService.link(record, folderB);
            })
            .when(() -> {
                // Cut off the record.
                dispositionService.cutoffDisposableItem(record);
                // Ensure the update has been applied to the record.
                internalDispositionService.updateNextDispositionAction(record);
                // Expected "as of" date: cut-off date plus the one-week period of schedule B.
                calendar.setTime((Date) nodeService.getProperty(record, PROP_CUT_OFF_DATE));
                calendar.add(Calendar.WEEK_OF_YEAR, 1);
            })
            .then()
                .expect(calendar.getTime())
                    .from(() -> dispositionService.getNextDispositionAction(record).getAsOfDate())
                    .because("Record should follow largest retention schedule period, which is one week.");
    }

    /**
     * <a href="https://issues.alfresco.com/jira/browse/RM-2526">RM-2526</a>
     * <p><pre>
     * Given a record subject to a disposition schedule
     * And it is linked to a disposition schedule with the same step order, but a shorter destroy step
     * When the record is moved onto the destroy step
     * Then the "as of" date is calculated using the longer period.
     * </pre>
     */
    public void testLinkedToShorterSchedule()
    {
        Calendar calendar = Calendar.getInstance();
        test()
            .given(() -> {
                setUpFilePlan();
                // Create a record filed under category B and linked to category A.
                record = fileFolderService.create(folderB, RECORD_NAME, ContentModel.TYPE_CONTENT).getNodeRef();
                recordService.link(record, folderA);
            })
            .when(() -> {
                // Cut off the record.
                dispositionService.cutoffDisposableItem(record);
                // Ensure the update has been applied to the record.
                internalDispositionService.updateNextDispositionAction(record);
                // Expected "as of" date: cut-off date plus the one-week period of schedule B.
                calendar.setTime((Date) nodeService.getProperty(record, PROP_CUT_OFF_DATE));
                calendar.add(Calendar.WEEK_OF_YEAR, 1);
            })
            .then()
                .expect(calendar.getTime())
                    .from(() -> dispositionService.getNextDispositionAction(record).getAsOfDate())
                    .because("Record should follow largest retention schedule period, which is one week.");
    }
}

View File

@@ -0,0 +1,136 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.test.integration.disposition;
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_DESCRIPTION;
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_DISPOSITION_INSTRUCTIONS;
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.DEFAULT_EVENT_NAME;
import static org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils.PERIOD_ONE_WEEK;
import static org.alfresco.util.GUID.generate;
import java.io.Serializable;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.DestroyAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.EditDispositionActionAsOfDateAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.TransferAction;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
/**
* Update next disposition step integration tests.
*
* @author Roxana Lucanu
* @since 2.3.1
*/
public class UpdateNextDispositionActionTest extends BaseRMTestCase
{
    /**
     * Given a record with multiple dispositions
     * When updating the next step
     * Then the action is available
     * <p>
     * relates to https://issues.alfresco.com/jira/browse/RM-3060
     */
    public void testUpdateNextDispositionAction_RM3060() throws Exception
    {
        doBehaviourDrivenTest(new BehaviourDrivenTest()
        {
            // Record filed under category1 and linked into category2's folder.
            NodeRef record;
            // Folder under category2 that the record is linked to.
            NodeRef folder2;

            @Override
            public void given()
            {
                // create category1
                NodeRef category1 = filePlanService.createRecordCategory(filePlan, generate());
                // create disposition schedule for category1
                createDispositionSchedule(category1);
                // create category2
                NodeRef category2 = filePlanService.createRecordCategory(filePlan, generate());
                // create disposition schedule for category2
                createDispositionSchedule(category2);
                // create folder2 inside category2
                folder2 = recordFolderService.createRecordFolder(category2, generate());
                // create folder1 inside category1
                NodeRef folder1 = recordFolderService.createRecordFolder(category1, generate());
                // create record inside folder1
                record = utils.createRecord(folder1, generate(), generate());
            }

            @Override
            public void when() throws Exception
            {
                // link the record to folder2
                recordService.link(record, folder2);
                // complete record
                utils.completeRecord(record);
                // cut off
                rmActionService.executeRecordsManagementAction(record, CutOffAction.NAME, null);
            }

            @Override
            public void then() throws Exception
            {
                // The cut-off action must have succeeded despite the record being
                // subject to two disposition schedules (the RM-3060 regression).
                assertTrue("Record " + record + " doesn't have the cutOff aspect.", nodeService.hasAspect(record, ASPECT_CUT_OFF));
            }
        });
    }

    /**
     * Creates a record-level disposition schedule on the given category with three
     * steps: cut off (immediately), transfer (on the default event) and destroy
     * (one week later).
     *
     * @param category the record category to attach the schedule to
     */
    private void createDispositionSchedule(NodeRef category)
    {
        DispositionSchedule ds = utils.createDispositionSchedule(category, DEFAULT_DISPOSITION_INSTRUCTIONS, DEFAULT_DISPOSITION_DESCRIPTION, true, false, false);

        // create the properties for CUTOFF action and add it to the disposition action definition
        Map<QName, Serializable> cutOff = new HashMap<QName, Serializable>(3);
        cutOff.put(PROP_DISPOSITION_ACTION_NAME, CutOffAction.NAME);
        cutOff.put(PROP_DISPOSITION_DESCRIPTION, generate());
        cutOff.put(PROP_DISPOSITION_PERIOD, CommonRMTestUtils.PERIOD_IMMEDIATELY);
        dispositionService.addDispositionActionDefinition(ds, cutOff);

        // create the properties for TRANSFER action and add it to the disposition action definition
        Map<QName, Serializable> transfer = new HashMap<QName, Serializable>(3);
        transfer.put(PROP_DISPOSITION_ACTION_NAME, TransferAction.NAME);
        transfer.put(PROP_DISPOSITION_DESCRIPTION, generate());
        transfer.put(PROP_DISPOSITION_EVENT, (Serializable)Collections.singletonList(DEFAULT_EVENT_NAME));
        dispositionService.addDispositionActionDefinition(ds, transfer);

        // create the properties for DESTROY action and add it to the disposition action definition
        Map<QName, Serializable> destroy = new HashMap<QName, Serializable>(3);
        destroy.put(PROP_DISPOSITION_ACTION_NAME, DestroyAction.NAME);
        destroy.put(PROP_DISPOSITION_DESCRIPTION, generate());
        destroy.put(PROP_DISPOSITION_PERIOD, PERIOD_ONE_WEEK);
        dispositionService.addDispositionActionDefinition(ds, destroy);
    }
}

View File

@@ -42,7 +42,6 @@ import org.junit.runners.Suite.SuiteClasses;
RM452Test.class, RM452Test.class,
RM804Test.class, RM804Test.class,
RM994Test.class, RM994Test.class,
RM1039Test.class,
RM1799Test.class, RM1799Test.class,
RM1814Test.class, RM1814Test.class,
RM978Test.class, RM978Test.class,

View File

@@ -1,186 +0,0 @@
/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.test.integration.issue;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import net.sf.acegisecurity.vote.AccessDecisionVoter;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.action.impl.CompleteEventAction;
import org.alfresco.module.org_alfresco_module_rm.action.impl.CutOffAction;
import org.alfresco.module.org_alfresco_module_rm.capability.Capability;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionAction;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.module.org_alfresco_module_rm.test.util.CommonRMTestUtils;
import org.alfresco.service.cmr.repository.NodeRef;
/**
* Unit test for RM-1039 ... can't move a folder into a category with a disposition schedule
*
* @author Roy Wetherall
* @since 2.1
*/
public class RM1039Test extends BaseRMTestCase
{
    /** This suite exercises record behaviour, so run with the record test fixture. */
    @Override
    protected boolean isRecordTest()
    {
        return true;
    }

    // try and move a folder from no disposition schedule to a disposition schedule
    public void testMoveRecordFolderFromNoDisToDis() throws Exception
    {
        // Create a folder in a category that has NO disposition schedule.
        final NodeRef recordFolder = doTestInTransaction(new Test<NodeRef>()
        {
            @Override
            public NodeRef run()
            {
                // create a record category (no disposition schedule)
                NodeRef recordCategory = filePlanService.createRecordCategory(filePlan, "Caitlin Reed");

                // create a record folder
                return recordFolderService.createRecordFolder(recordCategory, "Grace Wetherall");
            }

            @Override
            public void test(NodeRef result) throws Exception
            {
                // No schedule and no lifecycle aspect expected yet.
                assertNotNull(result);
                assertNull(dispositionService.getDispositionSchedule(result));
                assertFalse(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE));
            }
        });

        // File a record into the unscheduled folder.
        final NodeRef record = doTestInTransaction(new Test<NodeRef>()
        {
            @Override
            public NodeRef run()
            {
                // create a record
                return fileFolderService.create(recordFolder, "mytest.txt", ContentModel.TYPE_CONTENT).getNodeRef();
            }

            @Override
            public void test(NodeRef result) throws Exception
            {
                assertNotNull(result);
                assertNull(dispositionService.getDispositionSchedule(result));
                assertFalse(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE));
            }
        });

        // Move the folder into rmContainer (which has a folder-level schedule) and
        // verify the disposition lifecycle is picked up by the folder, not the record.
        doTestInTransaction(new Test<NodeRef>()
        {
            @Override
            public NodeRef run() throws Exception
            {
                Capability capability = capabilityService.getCapability("CreateModifyDestroyFolders");
                assertEquals(AccessDecisionVoter.ACCESS_GRANTED, capability.evaluate(recordFolder));
                assertEquals(AccessDecisionVoter.ACCESS_GRANTED, capability.evaluate(recordFolder, rmContainer));

                // take a look at the move capability
                Capability moveCapability = capabilityService.getCapability("Move");
                assertEquals(AccessDecisionVoter.ACCESS_GRANTED, moveCapability.evaluate(recordFolder, rmContainer));

                // move the node
                return fileFolderService.move(recordFolder, rmContainer, null).getNodeRef();
            }

            @Override
            public void test(NodeRef result) throws Exception
            {
                assertNotNull(result);
                assertNotNull(dispositionService.getDispositionSchedule(result));
                assertTrue(nodeService.hasAspect(result, ASPECT_DISPOSITION_LIFECYCLE));

                // First step after the move should be an uncut-off "cutoff" action
                // waiting on a single completion event.
                DispositionAction dispositionAction = dispositionService.getNextDispositionAction(result);
                assertNotNull(dispositionAction);
                assertNull(dispositionAction.getAsOfDate());
                assertEquals("cutoff", dispositionAction.getName());
                assertEquals(1, dispositionAction.getEventCompletionDetails().size());

                // take a look at the record and check things are as we would expect
                assertFalse(nodeService.hasAspect(record, ASPECT_DISPOSITION_LIFECYCLE));
            }
        });
    }

    // move from a disposition schedule to another .. both record folder level

    // move from a disposition schedule to another .. from record to folder level

    // try and move a cutoff folder
    public void testMoveCutoffRecordFolder() throws Exception
    {
        // Destination category with no disposition schedule.
        final NodeRef destination = doTestInTransaction(new Test<NodeRef>()
        {
            @Override
            public NodeRef run()
            {
                // create a record category (no disposition schedule)
                return filePlanService.createRecordCategory(filePlan, "Caitlin Reed");
            }
        });

        // Create a folder, complete its event and cut it off; moving it must then be denied.
        final NodeRef testFolder = doTestInTransaction(new Test<NodeRef>()
        {
            @Override
            public NodeRef run()
            {
                // create folder
                NodeRef testFolder = recordFolderService.createRecordFolder(rmContainer, "Peter Edward Francis");

                // complete event
                Map<String, Serializable> params = new HashMap<String, Serializable>(1);
                params.put(CompleteEventAction.PARAM_EVENT_NAME, CommonRMTestUtils.DEFAULT_EVENT_NAME);
                rmActionService.executeRecordsManagementAction(testFolder, CompleteEventAction.NAME, params);

                // cutoff folder
                rmActionService.executeRecordsManagementAction(testFolder, CutOffAction.NAME);

                return testFolder;
            }

            @Override
            public void test(NodeRef result) throws Exception
            {
                // take a look at the move capability
                Capability moveCapability = capabilityService.getCapability("Move");
                assertEquals(AccessDecisionVoter.ACCESS_DENIED, moveCapability.evaluate(result, destination));
            }
        });

        // Attempting the move anyway must fail.
        doTestInTransaction(new FailureTest()
        {
            @Override
            public void run() throws Exception
            {
                fileFolderService.move(testFolder, destination, null).getNodeRef();
            }
        });
    }
}

View File

@@ -384,9 +384,12 @@ public class MoveRecordFolderTest extends BaseRMTestCase
} }
}); });
} }
/**
// try and move a cutoff folder * Try and move a cutoff folder
*
* @see https://issues.alfresco.com/jira/browse/RM-1039
*/
public void testMoveCutoffRecordFolder() throws Exception public void testMoveCutoffRecordFolder() throws Exception
{ {
final NodeRef destination = doTestInTransaction(new Test<NodeRef>() final NodeRef destination = doTestInTransaction(new Test<NodeRef>()

View File

@@ -657,7 +657,7 @@ public class DispositionServiceImplTest extends BaseRMTestCase
checkDisposableItemChanged(mhRecordFolder42); checkDisposableItemChanged(mhRecordFolder42);
checkDisposableItemChanged(record43); checkDisposableItemChanged(record43);
checkDisposableItemUnchanged(mhRecordFolder44); checkDisposableItemUnchanged(mhRecordFolder44);
checkDisposableItemUnchanged(record45);; checkDisposableItemUnchanged(record45);
} }
}); });

View File

@@ -76,6 +76,7 @@ public class CommonRMTestUtils implements RecordsManagementModel
public static final String DEFAULT_EVENT_NAME = "case_closed"; public static final String DEFAULT_EVENT_NAME = "case_closed";
public static final String PERIOD_NONE = "none|0"; public static final String PERIOD_NONE = "none|0";
public static final String PERIOD_IMMEDIATELY = "immediately|0"; public static final String PERIOD_IMMEDIATELY = "immediately|0";
public static final String PERIOD_ONE_WEEK = "week|1";
public static final String PERIOD_ONE_YEAR = "year|1"; public static final String PERIOD_ONE_YEAR = "year|1";
public static final String PERIOD_THREE_YEARS = "year|3"; public static final String PERIOD_THREE_YEARS = "year|3";

View File

@@ -263,7 +263,7 @@ public class RecordServiceImplUnitTest extends BaseUnitTest
DispositionSchedule recordDispositionSchedule = mock(DispositionSchedule.class); DispositionSchedule recordDispositionSchedule = mock(DispositionSchedule.class);
when(recordDispositionSchedule.isRecordLevelDisposition()) when(recordDispositionSchedule.isRecordLevelDisposition())
.thenReturn(true); .thenReturn(true);
when(mockedDispositionService.getDispositionSchedule(record)) when(mockedDispositionService.getOriginDispositionSchedule(record))
.thenReturn(recordDispositionSchedule); .thenReturn(recordDispositionSchedule);
DispositionSchedule recordFolderDispositionSchedule = mock(DispositionSchedule.class); DispositionSchedule recordFolderDispositionSchedule = mock(DispositionSchedule.class);