Compare commits


2 Commits

Author          SHA1        Message                                          Date
alfresco-build  5dc0d39287  [maven-release-plugin] prepare release V2.6.0.1  2019-10-10 13:19:11 +01:00
Tom Page        5f87a5cd92  Update license headers.                          2019-10-02 14:58:48 +01:00
51 changed files with 195 additions and 3905 deletions

pom.xml (22 changed lines)
View File

@@ -4,7 +4,7 @@
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm</artifactId>
<packaging>pom</packaging>
<version>2.6.2</version>
<version>2.6.0.1</version>
<name>Alfresco Records Management</name>
<parent>
@@ -24,7 +24,7 @@
<connection>scm:git:https://git.alfresco.com/records-management/records-management.git</connection>
<developerConnection>scm:git:https://git.alfresco.com/records-management/records-management.git</developerConnection>
<url>https://git.alfresco.com/records-management/records-management</url>
<tag>V2.6.2</tag>
<tag>V2.6.0.1</tag>
</scm>
<issueManagement>
@@ -198,20 +198,8 @@
<argLine>-Xmx1024m -XX:MaxPermSize=256m -Duser.language=en -Dcom.sun.management.jmxremote</argLine>
<license.update.dryrun>true</license.update.dryrun>
<license.verbose>false</license.verbose>
<!-- Prevent the module from being added to inappropriate versions of the WAR file !-->
<alfresco.min.version>5.2.4</alfresco.min.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>18.0</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>junit</groupId>
@@ -256,9 +244,9 @@
<regexPropertySettings>
<regexPropertySetting>
<name>rm.module.repo.version.min</name>
<value>${alfresco.min.version}</value>
<regex>(\d+)\.(\d+).(\d+).*</regex>
<replacement>$1.$2.$3</replacement>
<value>${alfresco.version}</value>
<regex>(\d+)\.(\d+).*</regex>
<replacement>$1.$2</replacement>
<failIfNoMatch>false</failIfNoMatch>
</regexPropertySetting>
<regexPropertySetting>

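Note on the pom.xml hunk above: the two regexPropertySetting variants derive rm.module.repo.version.min either from ${alfresco.min.version} with three capture groups or from ${alfresco.version} with two. A minimal Java sketch of the two-group mapping, using java.util.regex directly (the class name and sample value are illustrative only):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionRegexDemo
{
    public static void main(String[] args)
    {
        // Same pattern and replacement as <regex>(\d+)\.(\d+).*</regex> / <replacement>$1.$2</replacement>.
        Pattern pattern = Pattern.compile("(\\d+)\\.(\\d+).*");
        Matcher matcher = pattern.matcher("5.2.g"); // sample ${alfresco.version} value taken from this diff

        // failIfNoMatch=false: keep the original value when the pattern does not match.
        String minVersion = matcher.matches() ? matcher.group(1) + "." + matcher.group(2) : "5.2.g";
        System.out.println(minVersion); // prints 5.2
    }
}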
View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm</artifactId>
<version>2.6.2</version>
<version>2.6.0.1</version>
</parent>
<licenses>
@@ -145,7 +145,6 @@
</goals>
<configuration>
<artifactItems>
<!-- Due to no compatible community version of ACS 5.2.N with RM community
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-community-share</artifactId>
@@ -158,19 +157,6 @@
<version>${project.version}</version>
<type>amp</type>
</artifactItem>
!-->
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-enterprise-share</artifactId>
<version>${project.version}</version>
<type>amp</type>
</artifactItem>
<artifactItem>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-enterprise-repo</artifactId>
<version>${project.version}</version>
<type>amp</type>
</artifactItem>
</artifactItems>
<outputDirectory>${project.build.directory}/amps</outputDirectory>
<useBaseVersion>true</useBaseVersion>
@@ -192,7 +178,7 @@
<configuration>
<backup>true</backup>
<ampLocation>
${project.build.directory}/amps/alfresco-rm-enterprise-repo-${project.version}.amp
${project.build.directory}/amps/alfresco-rm-community-repo-${project.version}.amp
</ampLocation>
<warLocation>${project.build.directory}/alf-installation/tomcat/webapps/alfresco.war
</warLocation>
@@ -207,7 +193,7 @@
<configuration>
<backup>true</backup>
<ampLocation>
${project.build.directory}/amps/alfresco-rm-enterprise-share-${project.version}.amp
${project.build.directory}/amps/alfresco-rm-community-share-${project.version}.amp
</ampLocation>
<warLocation>${project.build.directory}/alf-installation/tomcat/webapps/share.war
</warLocation>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-automation</artifactId>
<version>2.6.2</version>
<version>2.6.0.1</version>
</parent>
<properties>

View File

@@ -92,28 +92,14 @@ public class RestAPIFactory
return getRmRestWrapper().withSearchAPI();
}
public Node getNodeAPI(RepoTestModel model) throws RuntimeException
public Node getNodeAPI(RepoTestModel model) throws Exception
{
try
{
return getCoreAPI(null).usingNode(model);
}
catch (Exception e)
{
throw new RuntimeException("Failed to load nodeAPI.", e);
}
return getCoreAPI(null).usingNode(model);
}
public Node getNodeAPI(UserModel userModel, RepoTestModel model) throws RuntimeException
public Node getNodeAPI(UserModel userModel, RepoTestModel model) throws Exception
{
try
{
return getCoreAPI(userModel).usingNode(model);
}
catch (Exception e)
{
throw new RuntimeException("Failed to load nodeAPI.", e);
}
return getCoreAPI(userModel).usingNode(model);
}
public RMSiteAPI getRMSiteAPI()

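Note on the RestAPIFactory hunk above: the two getNodeAPI variants differ only in error handling, one wrapping failures in an unchecked RuntimeException and the other declaring the checked Exception and letting it propagate to the caller. A compressed, self-contained sketch of the two styles with placeholder names (loadNodeApi stands in for getCoreAPI(...).usingNode(model)):

public class NodeApiStyles
{
    // Style A: catch and rethrow as an unchecked exception, so callers need no throws clause.
    public Object getNodeApiWrapped()
    {
        try
        {
            return loadNodeApi();
        }
        catch (Exception e)
        {
            throw new RuntimeException("Failed to load nodeAPI.", e);
        }
    }

    // Style B: declare the checked exception and let callers handle or redeclare it.
    public Object getNodeApiPropagated() throws Exception
    {
        return loadNodeApi();
    }

    private Object loadNodeApi() throws Exception
    {
        return new Object(); // placeholder for the real API lookup
    }
}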
View File

@@ -73,7 +73,7 @@ import org.springframework.beans.factory.annotation.Autowired;
public abstract class BaseAPI
{
// logger
protected static final Logger LOGGER = LoggerFactory.getLogger(BaseAPI.class);
private static final Logger LOGGER = LoggerFactory.getLogger(BaseAPI.class);
/** exception key in JSON response body */
private static final String EXCEPTION_KEY = "exception";
@@ -216,7 +216,6 @@ public abstract class BaseAPI
client.getAlfrescoUrl(),
URLEncodedUtils.format(parameters, "UTF-8"));
}
LOGGER.info("On GET {}, received following response: ", requestURL);
client.close();
return doGetRequest(username, password, requestURL);
}

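Note on the BaseAPI hunk above: the GET path formats the request URL from a MessageFormat template plus a percent-encoded query string built with Apache HttpClient's URLEncodedUtils, then delegates to doGetRequest. A self-contained sketch of that URL-building step; the base URL and parameter values are invented, while the endpoint template mirrors RM_SEARCH_ENDPOINT from the SearchAPI changes later in this diff:

import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;

import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;

public class GetUrlDemo
{
    public static void main(String[] args)
    {
        String template = "{0}alfresco/s/slingshot/rmsearch/{1}?{2}";
        List<NameValuePair> parameters = Arrays.asList(
                new BasicNameValuePair("query", "keywords:record"),
                new BasicNameValuePair("sortby", "cm:name/asc"));

        String requestURL = MessageFormat.format(template,
                "https://localhost:8443/",                    // stand-in for client.getAlfrescoUrl()
                "rm",                                         // second placeholder in the template (illustrative value)
                URLEncodedUtils.format(parameters, "UTF-8")); // percent-encodes the query string

        System.out.println(requestURL);
    }
}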
View File

@@ -26,8 +26,6 @@
*/
package org.alfresco.rest.rm.community.model.common;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
@@ -52,10 +50,4 @@ public class IdNamePair
@JsonProperty (required = true)
private String name;
@JsonProperty (required = true)
private List<String> aspectNames;
@JsonProperty (required = true)
private String nodeType;
}

View File

@@ -52,13 +52,6 @@ import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanCo
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_ORIGINATING_CREATION_DATE;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_ORIGINATING_LOCATION;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_ORIGINATING_USER_ID;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_AS_OF;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_NAME;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_AUTHORITY;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS_ELIGIBLE;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_INSTRUCTIONS;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_PERIOD;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_DISPOSITION_PERIOD_EXPRESSION;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RECORD_SEARCH_HAS_DISPOSITION_SCHEDULE;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_RESOLUTION_UNIT;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_ROOT_NODE_REF;
@@ -71,7 +64,6 @@ import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanCo
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_X_RESOLUTION;
import static org.alfresco.rest.rm.community.model.fileplancomponents.FilePlanComponentFields.PROPERTIES_Y_RESOLUTION;
import java.util.Date;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@@ -216,25 +208,4 @@ public class RecordProperties extends TestModel
@JsonProperty(PROPERTIES_AUTHOR)
private String author;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_PERIOD_EXPRESSION)
private String recordSearchDispositionPeriodExpression;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_AUTHORITY)
private String recordSearchDispositionAuthority;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_AS_OF)
private Date recordSearchDispositionActionAsOf;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_PERIOD)
private String recordSearchDispositionPeriod;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_ACTION_NAME)
private String recordSearchDispositionActionName;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_EVENTS_ELIGIBLE)
private Boolean recordSearchDispositionEventsEligible;
@JsonProperty (PROPERTIES_RECORD_SEARCH_DISPOSITION_INSTRUCTIONS)
private String recordSearchDispositionInstructions;
}

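Note on the model hunks above (IdNamePair, RecordProperties): the test models are plain POJOs whose fields are bound to JSON names through Jackson's @JsonProperty. A minimal, self-contained round trip with a cut-down, invented model rather than one of the real RM classes:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonModelDemo
{
    public static class MiniProperties
    {
        @JsonProperty("cm:title")
        private String title;

        @JsonProperty("cm:description")
        private String description;
    }

    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();
        MiniProperties props = mapper.readValue(
                "{\"cm:title\":\"A title\",\"cm:description\":\"A description\"}", MiniProperties.class);
        // @JsonProperty makes the private fields readable and writable for Jackson.
        System.out.println(mapper.writeValueAsString(props));
    }
}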
View File

@@ -135,7 +135,7 @@ public class FilePlanAPI extends RMModelRequest
/**
* see {@link #createRootRecordCategory(RecordCategory, String, String)}
*/
public RecordCategory createRootRecordCategory(RecordCategory recordCategoryModel, String filePlanId)
public RecordCategory createRootRecordCategory(RecordCategory recordCategoryModel, String filePlanId) throws Exception
{
mandatoryObject("recordCategoryModel", recordCategoryModel);
mandatoryString("filePlanId", filePlanId);
@@ -150,7 +150,7 @@ public class FilePlanAPI extends RMModelRequest
* @param filePlanId The identifier of a file plan
* @param parameters The URL parameters to add
* @return The created {@link RecordCategory}
* @throws RuntimeException for the following cases:
* @throws Exception for the following cases:
* <ul>
* <li>{@code filePlanId} is not a valid format or {@code filePlanId} is invalid</li>
* <li>authentication fails</li>
@@ -160,7 +160,7 @@ public class FilePlanAPI extends RMModelRequest
* <li>model integrity exception, including node name with invalid characters</li>
* </ul>
*/
public RecordCategory createRootRecordCategory(RecordCategory recordCategoryModel, String filePlanId, String parameters)
public RecordCategory createRootRecordCategory(RecordCategory recordCategoryModel, String filePlanId, String parameters) throws Exception
{
mandatoryObject("recordCategoryModel", recordCategoryModel);
mandatoryString("filePlanId", filePlanId);

View File

@@ -201,7 +201,7 @@ public class RecordCategoryAPI extends RMModelRequest
/**
* see {@link #createRecordCategoryChild(RecordCategoryChild, String, String)}
*/
public RecordCategoryChild createRecordCategoryChild(RecordCategoryChild recordCategoryChildModel, String recordCategoryId)
public RecordCategoryChild createRecordCategoryChild(RecordCategoryChild recordCategoryChildModel, String recordCategoryId) throws Exception
{
mandatoryObject("recordCategoryChildModel", recordCategoryChildModel);
mandatoryString("recordCategoryId", recordCategoryId);
@@ -216,7 +216,7 @@ public class RecordCategoryAPI extends RMModelRequest
* @param recordCategoryId The identifier of a record category
* @param parameters The URL parameters to add
* @return The created {@link RecordCategoryChild}
* @throws RuntimeException for the following cases:
* @throws Exception for the following cases:
* <ul>
* <li>{@code recordCategoryId} is not a valid format or {@code recordCategoryChildModel} is invalid</li>
* <li>authentication fails</li>
@@ -226,7 +226,7 @@ public class RecordCategoryAPI extends RMModelRequest
* <li>model integrity exception, including node name with invalid characters</li>
* </ul>
*/
public RecordCategoryChild createRecordCategoryChild(RecordCategoryChild recordCategoryChildModel, String recordCategoryId, String parameters)
public RecordCategoryChild createRecordCategoryChild(RecordCategoryChild recordCategoryChildModel, String recordCategoryId, String parameters) throws Exception
{
mandatoryObject("filePlanComponentProperties", recordCategoryChildModel);
mandatoryString("recordCategoryId", recordCategoryId);

View File

@@ -40,7 +40,6 @@ import static org.springframework.http.HttpMethod.PUT;
import static org.testng.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import com.fasterxml.jackson.databind.JsonNode;
@@ -213,7 +212,7 @@ public class RecordFolderAPI extends RMModelRequest
/**
* see {@link #createRecord(Record, String, String)}
*/
public Record createRecord(Record recordModel, String recordFolderId)
public Record createRecord(Record recordModel, String recordFolderId) throws Exception
{
mandatoryObject("recordModel", recordModel);
mandatoryString("recordFolderId", recordFolderId);
@@ -228,9 +227,9 @@ public class RecordFolderAPI extends RMModelRequest
* @param recordContent {@link File} pointing to the content of the electronic record to be created
* @param recordFolderId The identifier of a record folder
* @return newly created {@link Record}
* @throws RuntimeException for invalid recordModel JSON strings
* @throws Exception for invalid recordModel JSON strings
*/
public Record createRecord(Record recordModel, String recordFolderId, File recordContent) throws RuntimeException
public Record createRecord(Record recordModel, String recordFolderId, File recordContent) throws Exception
{
mandatoryString("recordFolderId", recordFolderId);
mandatoryObject("recordContent", recordContent);
@@ -246,15 +245,7 @@ public class RecordFolderAPI extends RMModelRequest
* to the request.
*/
RequestSpecBuilder builder = getRmRestWrapper().configureRequestSpec();
JsonNode root;
try
{
root = new ObjectMapper().readTree(toJson(recordModel, Record.class, FilePlanComponentMixIn.class));
}
catch (IOException e)
{
throw new RuntimeException("Failed to convert model to JSON.", e);
}
JsonNode root = new ObjectMapper().readTree(toJson(recordModel, Record.class, FilePlanComponentMixIn.class));
// add request fields
Iterator<String> fieldNames = root.fieldNames();
while (fieldNames.hasNext())
@@ -275,7 +266,7 @@ public class RecordFolderAPI extends RMModelRequest
* @param recordFolderId The identifier of a record folder
* @param parameters The URL parameters to add
* @return The created {@link Record}
* @throws RuntimeException for the following cases:
* @throws Exception for the following cases:
* <ul>
* <li>{@code recordFolderId is not a valid format or {@code recordModel} is invalid</li>
* <li>authentication fails</li>
@@ -284,7 +275,7 @@ public class RecordFolderAPI extends RMModelRequest
* <li>model integrity exception, including node name with invalid characters</li>
* </ul>
*/
public Record createRecord(Record recordModel, String recordFolderId, String parameters)
public Record createRecord(Record recordModel, String recordFolderId, String parameters) throws Exception
{
mandatoryObject("recordModel", recordModel);
mandatoryString("recordFolderId", recordFolderId);

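Note on the RecordFolderAPI hunk above: the simplified createRecord parses the serialized record model with Jackson's readTree and then iterates the top-level field names to add them to the request spec. A self-contained sketch of that parse-and-iterate step (the JSON string is a made-up stand-in for the output of toJson(recordModel, ...)):

import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadTreeDemo
{
    public static void main(String[] args) throws Exception
    {
        String recordJson = "{\"name\":\"record-001\",\"nodeType\":\"rma:record\"}";

        JsonNode root = new ObjectMapper().readTree(recordJson);

        Iterator<String> fieldNames = root.fieldNames();
        while (fieldNames.hasNext())
        {
            String fieldName = fieldNames.next();
            // The real code adds each field to the request; here we just print it.
            System.out.println(fieldName + " = " + root.get(fieldName).asText());
        }
    }
}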
View File

@@ -51,8 +51,6 @@ public class RecordCategoriesAPI extends BaseAPI
private static final Logger LOGGER = LoggerFactory.getLogger(RecordCategoriesAPI.class);
private static final String RM_ACTIONS_API = "{0}rma/actions/ExecutionQueue";
private static final String DISPOSITION_ACTIONS_API = "{0}node/{1}/dispositionschedule/dispositionactiondefinitions";
private static final String DISPOSITION_SCHEDULE_API = "{0}node/{1}/dispositionschedule";
/**
* Creates a retention schedule for the category given as parameter
@@ -73,21 +71,6 @@ public class RecordCategoriesAPI extends BaseAPI
return doPostJsonRequest(user, password, SC_OK, requestParams, RM_ACTIONS_API);
}
/**
* Get the disposition schedule nodeRef
*
* @param user
* @param password
* @param categoryName
* @return the disposition schedule nodeRef
*/
public String getDispositionScheduleNodeRef(String user, String password, String categoryName)
{
String catNodeRef = NODE_PREFIX + getItemNodeRef(user, password, "/" + categoryName);
JSONObject dispositionSchedule = doGetRequest(user, password, MessageFormat.format(DISPOSITION_SCHEDULE_API, "{0}", catNodeRef));
return dispositionSchedule.getJSONObject("data").getString("nodeRef").replace(getNodeRefSpacesStore(), "");
}
/**
* Sets retention schedule authority and instructions, also if it is applied to records or folders
*
@@ -125,11 +108,7 @@ public class RecordCategoriesAPI extends BaseAPI
addPropertyToRequest(requestParams, "period", properties, RETENTION_SCHEDULE.RETENTION_PERIOD);
addPropertyToRequest(requestParams, "ghostOnDestroy", properties, RETENTION_SCHEDULE.RETENTION_GHOST);
addPropertyToRequest(requestParams, "periodProperty", properties, RETENTION_SCHEDULE.RETENTION_PERIOD_PROPERTY);
String events = getPropertyValue(properties, RETENTION_SCHEDULE.RETENTION_EVENTS);
if(!events.equals(""))
{
requestParams.append("events", events);
}
addPropertyToRequest(requestParams, "events", properties, RETENTION_SCHEDULE.RETENTION_EVENTS);
addPropertyToRequest(requestParams, "eligibleOnFirstCompleteEvent", properties, RETENTION_SCHEDULE.RETENTION_ELIGIBLE_FIRST_EVENT);
return doPostJsonRequest(user, password, SC_OK, requestParams, MessageFormat.format(DISPOSITION_ACTIONS_API, "{0}", catNodeRef));

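Note on the RecordCategoriesAPI hunk above: the removed getDispositionScheduleNodeRef reads the nodeRef out of the JSON returned by the dispositionschedule webscript and strips the SpacesStore prefix. A self-contained org.json sketch of that extraction, with a hard-coded response body in the shape the helper expects (the nodeRef value is invented):

import org.json.JSONObject;

public class DispositionScheduleJsonDemo
{
    public static void main(String[] args)
    {
        JSONObject response = new JSONObject(
                "{\"data\": {\"nodeRef\": \"workspace://SpacesStore/0000-1111-2222-3333\"}}");

        // Same navigation as the removed helper: data -> nodeRef, then drop the store prefix
        // (the real code obtains the prefix from getNodeRefSpacesStore()).
        String nodeRef = response.getJSONObject("data").getString("nodeRef")
                .replace("workspace://SpacesStore/", "");

        System.out.println(nodeRef);
    }
}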
View File

@@ -350,17 +350,4 @@ public class RecordsAPI extends BaseAPI
return doPostJsonRequest(user, password, SC_OK, requestParams, ACTIONS_API);
}
/**
* Retrieves the record's nodeRef
*
* @param username the user's username
* @param password its password
* @param recordName the record full name
* @param recordPath the String with which the record name starts
* @return the record nodeRef in case it exists, empty string otherwise
*/
public String getRecordNodeRef(String username, String password, String recordName, String recordPath)
{
return getNodeRefSpacesStore() + getItemNodeRef(username, password, recordPath + "/" + recordName);
}
}

View File

@@ -36,7 +36,6 @@ import org.alfresco.rest.core.v0.BaseAPI;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -57,9 +56,6 @@ public class SearchAPI extends BaseAPI
/** faceted search API endpoint */
private static final String FACETED_SEARCH_ENDPOINT = "{0}alfresco/s/slingshot/rmsearch/faceted/rmsearch?{1}";
/** share live search API endpoint */
private static final String SHARE_LIVE_SEARCH_DOCS_ENDPOINT = "{0}alfresco/s/slingshot/live-search-docs?{1}";
/** RM search URL template */
private static final String RM_SEARCH_ENDPOINT = "{0}alfresco/s/slingshot/rmsearch/{1}?{2}";
@@ -142,20 +138,6 @@ public class SearchAPI extends BaseAPI
return facetedRequest(username, password, parameters, FACETED_SEARCH_ENDPOINT);
}
/**
* Execute share live search for documents.
*
* @param searchUser
* @param searchPassword
* @param searchTerm
* @return search results (see API reference for more details)
*/
public JSONObject liveSearchForDocuments(String searchUser, String searchPassword, String searchTerm)
{
return facetedRequest(searchUser, searchPassword, Arrays.asList(new BasicNameValuePair("t", searchTerm)),
SHARE_LIVE_SEARCH_DOCS_ENDPOINT);
}
/**
* Execute faceted search for term.
* @param searchUser
@@ -183,20 +165,6 @@ public class SearchAPI extends BaseAPI
return getItemNames(facetedSearchForTerm(username, password, term));
}
/**
* Helper method to search for documents as a user using share live search.
* @param username to search as
* @param password for username
* @param term search term
* @return list of document names found
*/
public List<String> liveSearchForDocumentsAsUser(String username, String password, String term) throws JSONException
{
JSONObject searchResult = liveSearchForDocuments(username, password, term);
LOGGER.info(searchResult.toString(3));
return getItemNames(searchResult);
}
/**
* Helper method to extract list of names from search result.
* @param searchResult

View File

@@ -1,142 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.v0.service;
import java.util.HashMap;
import org.alfresco.rest.core.v0.BaseAPI;
import org.alfresco.rest.v0.RecordCategoriesAPI;
import org.alfresco.utility.data.DataUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* Service for different disposition schedule actions
*
* @author jcule, cagache
* @since 2.6.2
*/
@Service
public class DispositionScheduleService extends BaseAPI
{
@Autowired
private RecordCategoriesAPI recordCategoriesAPI;
@Autowired
private DataUser dataUser;
/**
* Helper method for adding a retain after period step
*
* @param categoryName the category in whose schedule the step will be added
* @param period
*/
public void addRetainAfterPeriodStep(String categoryName, String period)
{
HashMap<RETENTION_SCHEDULE, String> retainStep = new HashMap<>();
retainStep.put(RETENTION_SCHEDULE.NAME, "retain");
retainStep.put(RETENTION_SCHEDULE.RETENTION_PERIOD, period);
retainStep.put(RETENTION_SCHEDULE.DESCRIPTION, "Retain after a period step");
recordCategoriesAPI.addDispositionScheduleSteps(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), categoryName, retainStep);
}
/**
* Helper method for adding a cut off after period step
*
* @param categoryName the category in whose schedule the step will be added
* @param period
*/
public void addCutOffAfterPeriodStep(String categoryName, String period)
{
HashMap<RETENTION_SCHEDULE, String> cutOffStep = new HashMap<>();
cutOffStep.put(RETENTION_SCHEDULE.NAME, "cutoff");
cutOffStep.put(RETENTION_SCHEDULE.RETENTION_PERIOD, period);
cutOffStep.put(RETENTION_SCHEDULE.DESCRIPTION, "Cut off after a period step");
recordCategoriesAPI.addDispositionScheduleSteps(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), categoryName, cutOffStep);
}
/**
* Helper method for adding a destroy with ghosting after period
*
* @param categoryName the category in whose schedule the step will be added
* @param period
*/
public void addDestroyWithGhostingAfterPeriodStep(String categoryName, String period)
{
HashMap<RETENTION_SCHEDULE, String> destroyStep = new HashMap<>();
destroyStep.put(RETENTION_SCHEDULE.NAME, "destroy");
destroyStep.put(RETENTION_SCHEDULE.RETENTION_PERIOD, period);
destroyStep.put(RETENTION_SCHEDULE.DESCRIPTION, "Destroy after a period step");
destroyStep.put(RETENTION_SCHEDULE.RETENTION_GHOST, "on");
recordCategoriesAPI.addDispositionScheduleSteps(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), categoryName, destroyStep);
}
/**
* Helper method for adding a cut off after an event occurs step
*
* @param categoryName the category in whose schedule the step will be added
* @param events
*/
public void addCutOffAfterEventStep(String categoryName, String events)
{
HashMap<RETENTION_SCHEDULE, String> cutOffStep = new HashMap<>();
cutOffStep.put(RETENTION_SCHEDULE.NAME, "cutoff");
cutOffStep.put(RETENTION_SCHEDULE.RETENTION_EVENTS, events);
cutOffStep.put(RETENTION_SCHEDULE.DESCRIPTION, "Cut off after event step");
recordCategoriesAPI.addDispositionScheduleSteps(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), categoryName, cutOffStep);
}
/**
* Helper method to create retention schedule with general fields for the given category as admin
* and apply it to the records
*
* @param categoryName
* @param appliedToRecords
*/
public void createCategoryRetentionSchedule(String categoryName, Boolean appliedToRecords)
{
recordCategoriesAPI.createRetentionSchedule(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), categoryName);
String retentionScheduleNodeRef = recordCategoriesAPI.getDispositionScheduleNodeRef(
dataUser.getAdminUser().getUsername(), dataUser.getAdminUser().getPassword(), categoryName);
HashMap<RETENTION_SCHEDULE, String> retentionScheduleGeneralFields = new HashMap<>();
retentionScheduleGeneralFields.put(RETENTION_SCHEDULE.RETENTION_AUTHORITY, "Authority");
retentionScheduleGeneralFields.put(RETENTION_SCHEDULE.RETENTION_INSTRUCTIONS, "Instructions");
recordCategoriesAPI.setRetentionScheduleGeneralFields(dataUser.getAdminUser().getUsername(),
dataUser.getAdminUser().getPassword(), retentionScheduleNodeRef, retentionScheduleGeneralFields,
appliedToRecords);
}
}

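Note on the removed DispositionScheduleService above: every helper follows the same pattern, build a map of retention-schedule keys describing the step, then hand it to recordCategoriesAPI.addDispositionScheduleSteps as the admin user. A standalone sketch of that pattern with a stand-in enum (the real RETENTION_SCHEDULE keys are not redefined here):

import java.util.HashMap;
import java.util.Map;

public class RetentionStepDemo
{
    // Stand-in for the RETENTION_SCHEDULE keys used above; only the ones needed here are modelled.
    enum RetentionField
    {
        NAME, DESCRIPTION, RETENTION_PERIOD, RETENTION_GHOST
    }

    public static void main(String[] args)
    {
        // Mirrors addDestroyWithGhostingAfterPeriodStep: a destroy step with ghosting, applied after a period.
        Map<RetentionField, String> destroyStep = new HashMap<>();
        destroyStep.put(RetentionField.NAME, "destroy");
        destroyStep.put(RetentionField.RETENTION_PERIOD, "immediately");
        destroyStep.put(RetentionField.DESCRIPTION, "Destroy after a period step");
        destroyStep.put(RetentionField.RETENTION_GHOST, "on");

        // In the real service this map is handed to
        // recordCategoriesAPI.addDispositionScheduleSteps(adminUser, adminPassword, categoryName, destroyStep).
        destroyStep.forEach((field, value) -> System.out.println(field + " = " + value));
    }
}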
View File

@@ -196,9 +196,9 @@ public class BaseRMRestTest extends RestTest
* @param userModel The user under whose privileges this structure is going to be created
* @param categoryName The name of the category
* @return The created category
* @throws RuntimeException on unsuccessful component creation
* @throws Exception on unsuccessful component creation
*/
public RecordCategory createRootCategory(UserModel userModel, String categoryName)
public RecordCategory createRootCategory(UserModel userModel, String categoryName) throws Exception
{
return createRootCategory(userModel, categoryName, RECORD_CATEGORY_TITLE);
}
@@ -223,9 +223,9 @@ public class BaseRMRestTest extends RestTest
* @param categoryName The name of the category
* @param categoryTitle The title of the category
* @return The created category
* @throws RuntimeException on unsuccessful component creation
* @throws Exception on unsuccessful component creation
*/
public RecordCategory createRootCategory(UserModel userModel, String categoryName, String categoryTitle)
public RecordCategory createRootCategory(UserModel userModel, String categoryName, String categoryTitle) throws Exception
{
RecordCategory recordCategoryModel = createRecordCategoryModel(categoryName, categoryTitle);
return getRestAPIFactory().getFilePlansAPI(userModel).createRootRecordCategory(recordCategoryModel, FILE_PLAN_ALIAS);
@@ -294,9 +294,9 @@ public class BaseRMRestTest extends RestTest
* @param recordCategoryId The id of the record category
* @param name The name of the folder
* @return The created folder
* @throws RuntimeException on unsuccessful component creation
* @throws Exception on unsuccessful component creation
*/
public RecordCategoryChild createFolder(UserModel user, String recordCategoryId, String name)
public RecordCategoryChild createFolder(UserModel user, String recordCategoryId, String name) throws Exception
{
RecordCategoryChild recordFolderModel = createRecordCategoryChildModel(name, RECORD_FOLDER_TYPE);
return getRestAPIFactory().getRecordCategoryAPI(user).createRecordCategoryChild(recordFolderModel, recordCategoryId);
@@ -434,13 +434,13 @@ public class BaseRMRestTest extends RestTest
}
/**
* Helper method to create a randomly-named [category]/[folder] structure in file plan
* Helper method to create a randomly-named <category>/<folder> structure in file plan
*
* @param user The user under whose privileges this structure is going to be created
* @return {@link RecordCategoryChild} which represents the record folder
* @throws RuntimeException on failed creation
* @throws Exception on failed creation
*/
public RecordCategoryChild createCategoryFolderInFilePlan(UserModel user)
public RecordCategoryChild createCategoryFolderInFilePlan(UserModel user) throws Exception
{
// create root category
RecordCategory recordCategory = createRootCategory(user, "Category " + getRandomAlphanumeric());
@@ -450,12 +450,12 @@ public class BaseRMRestTest extends RestTest
}
/**
* Helper method to create a randomly-named [category]/[folder] structure in file plan as the admin user
* Helper method to create a randomly-named <category>/<folder> structure in file plan as the admin user
*
* @return {@link RecordCategoryChild} which represents the record folder
* @throws RuntimeException on failed creation
* @throws Exception on failed creation
*/
public RecordCategoryChild createCategoryFolderInFilePlan()
public RecordCategoryChild createCategoryFolderInFilePlan() throws Exception
{
return createCategoryFolderInFilePlan(getAdminUser());
}

View File

@@ -1,299 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.recordcategories;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.v0.service.DispositionScheduleService;
import org.alfresco.test.AlfrescoTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.Assert;
import org.testng.annotations.Test;
public class DispositionScheduleInheritanceTests extends BaseRMRestTest
{
private static final String RETAIN_STEP = "retain";
private static final String CUTOFF_STEP = "cutoff";
@Autowired
private DispositionScheduleService dispositionScheduleService;
/**
* Given following structure is created:
* rootCategory with RS applied on records level
* - subCategory without RS
* - recFolder
* - incomplete electronic record
* - complete non-electronic record
* Then both records should inherit the RS from rootCategory
*/
@AlfrescoTest (jira = "MNT-19967")
@Test
public void testRSInheritanceOnRecordsWhenOnlyACategoryHasRS() throws Exception
{
STEP("Create record category with retention schedule and apply it to records.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), true);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Add retention schedule retain step with immediate period.");
dispositionScheduleService.addRetainAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Create a subcategory with a record folder");
RecordCategoryChild subCategory = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
RecordCategoryChild recFolder = createFolder(subCategory.getId(), getRandomName("recFolder"));
STEP("Create 2 records in the record folder. Complete one of them.");
Record elRecord = createElectronicRecord(recFolder.getId(), getRandomName("elRecord"));
Record nonElRecord = createNonElectronicRecord(recFolder.getId(), getRandomName("nonElRecord"));
getRestAPIFactory().getRecordsAPI().completeRecord(nonElRecord.getId());
STEP("Check that both records inherit root category retention schedule");
Assert.assertTrue(elRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true");
Assert.assertTrue(nonElRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true");
}
/**
* Given following structure is created:
* rootCategory with RS applied on records folder level
* - subCategory without RS
* - recFolder
* Then recFolder should inherit the RS from rootCategory
*/
@Test
public void testRSInheritanceOnRecordFoldersWhenOnlyACategoryHasRS() throws Exception
{
STEP("Create record category with retention schedule and apply it to record folders.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), false);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Add retention schedule retain step with immediate period.");
dispositionScheduleService.addRetainAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Create a subcategory with a record folder");
RecordCategoryChild subCategory = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
RecordCategoryChild recFolder = createFolder(subCategory.getId(), getRandomName("recFolder"));
STEP("Check that recFolder inherits root category retention schedule");
Assert.assertTrue(recFolder.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true");
}
/**
* Given following structure is created:
* rootCategory with RS applied on records level
* - subCategory1 with another RS applied on records level
* - subCategory2 without RS
* - recFolder
* - incomplete electronic record
* - complete non-electronic record
* Then both records should inherit the RS from subCategory1
*/
@Test
public void testRSInheritanceOnRecordsWhen2CategoriesHaveRS() throws Exception
{
STEP("Create record category with retention schedule and apply it to records.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), true);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Create a subcategory with retention schedule and apply it to records.");
RecordCategoryChild subCategory1 = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
String subcategory1Path = rootCategory.getName() + "/" + subCategory1.getName();
dispositionScheduleService.createCategoryRetentionSchedule(subcategory1Path, true);
STEP("Add retention schedule retain step with 1 day after created date.");
dispositionScheduleService.addRetainAfterPeriodStep(subcategory1Path, "day|1");
STEP("Create a subcategory2 in subcategory1");
RecordCategoryChild subCategory2 = createRecordCategory(subCategory1.getId(), getRandomName("subCategory"));
STEP("Create a record folder with 2 records. Complete one of them.");
RecordCategoryChild recFolder = createFolder(subCategory2.getId(), getRandomName("recFolder"));
Record elRecord = createElectronicRecord(recFolder.getId(), getRandomName("elRecord"));
Record nonElRecord = createNonElectronicRecord(recFolder.getId(), getRandomName("nonElRecord"));
getRestAPIFactory().getRecordsAPI().completeRecord(nonElRecord.getId());
STEP("Check that both records inherit subCategory1 retention schedule");
Assert.assertTrue(elRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true for incomplete record");
Assert.assertTrue(nonElRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true for complete record");
Assert.assertEquals(elRecord.getProperties().getRecordSearchDispositionActionName(),
RETAIN_STEP,
"Disposition action should be retain and not cutoff for incomplete record");
Assert.assertEquals(nonElRecord.getProperties().getRecordSearchDispositionActionName(),
RETAIN_STEP,
"Disposition action should be retain and not cutoff for complete record");
}
/**
* Given following structure is created:
* rootCategory with RS applied on records folder level
* - subCategory1 with another RS applied on records folder level
* - subCategory2 without RS
* - recFolder
* Then recFolder should inherit the RS from subCategory1
*/
@Test
public void testRSInheritanceOnRecordFoldersWhen2CategoriesHaveRS() throws Exception
{
STEP("Create record category with retention schedule and apply it to record folders.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), false);
STEP("Add retention schedule retain step with 2 days after created date.");
dispositionScheduleService.addRetainAfterPeriodStep(rootCategory.getName(), "day|2");
STEP("Create a subcategory with retention schedule and apply it to record folders.");
RecordCategoryChild subCategory1 = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
String subcategory1Path = rootCategory.getName() + "/" + subCategory1.getName();
dispositionScheduleService.createCategoryRetentionSchedule(subcategory1Path, false);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(subcategory1Path, "immediately");
STEP("Create a subcategory2 with a record folder in subcategory1");
RecordCategoryChild subCategory2 = createRecordCategory(subCategory1.getId(), getRandomName("subCategory"));
RecordCategoryChild recFolder = createFolder(subCategory2.getId(), getRandomName("recFolder"));
STEP("Check that recFolder inherits subCategory1 retention schedule");
Assert.assertTrue(recFolder.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true");
Assert.assertEquals(recFolder.getProperties().getRecordSearchDispositionActionName(),
CUTOFF_STEP,
"Disposition action should be cutoff and not retain for the record folder");
}
/**
* Given following structure is created:
* rootCategory with RS applied on folder records level
* - subCategory with another RS applied on records level
* - recFolder
* - incomplete electronic record
* - complete non-electronic record
* Then both records should inherit the RS from subCategory
*/
@Test
public void testMixedRSInheritanceWhenFirstParentHasRSOnRecords() throws Exception
{
STEP("Create record category with retention schedule and apply it to folder records.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), false);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Create a subcategory with retention schedule and apply it to records.");
RecordCategoryChild subCategory = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
String subcategoryPath = rootCategory.getName() + "/" + subCategory.getName();
dispositionScheduleService.createCategoryRetentionSchedule(subcategoryPath, true);
STEP("Add retention schedule retain step with 1 day after created date.");
dispositionScheduleService.addRetainAfterPeriodStep(subcategoryPath, "day|1");
STEP("Create a record folder with 2 records. Complete one of them.");
RecordCategoryChild recFolder = createFolder(subCategory.getId(), getRandomName("recFolder"));
Record elRecord = createElectronicRecord(recFolder.getId(), getRandomName("elRecord"));
Record nonElRecord = createNonElectronicRecord(recFolder.getId(), getRandomName("nonElRecord"));
getRestAPIFactory().getRecordsAPI().completeRecord(nonElRecord.getId());
STEP("Check that both records inherit subCategory retention schedule");
Assert.assertTrue(elRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true for incomplete record");
Assert.assertTrue(nonElRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true for complete record");
Assert.assertEquals(elRecord.getProperties().getRecordSearchDispositionActionName(),
RETAIN_STEP,
"Disposition action should be retain and not cutoff for incomplete record");
Assert.assertEquals(nonElRecord.getProperties().getRecordSearchDispositionActionName(),
RETAIN_STEP,
"Disposition action should be retain and not cutoff for complete record");
}
/**
* Given following structure is created:
* rootCategory with RS applied on records level
* - subCategory with another RS applied on folder records level
* - recFolder
* - incomplete electronic record
* - complete non-electronic record
* Then both records should not have RS (rma:recordSearchHasDispositionSchedule property is set to false)
* and record folder inherits the RS from subCategory
*/
@Test
public void testMixedRSInheritanceWhenFirstParentHasRSOnFolders() throws Exception
{
STEP("Create record category with retention schedule and apply it to records.");
RecordCategory rootCategory = createRootCategory(getRandomName("rootCategory"));
dispositionScheduleService.createCategoryRetentionSchedule(rootCategory.getName(), true);
STEP("Add retention schedule cut off step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(rootCategory.getName(), "immediately");
STEP("Create a subcategory with retention schedule and apply it to record folders.");
RecordCategoryChild subCategory = createRecordCategory(rootCategory.getId(), getRandomName("subCategory"));
String subcategoryPath = rootCategory.getName() + "/" + subCategory.getName();
dispositionScheduleService.createCategoryRetentionSchedule(subcategoryPath, false);
STEP("Add retention schedule retain step with 1 day after created date.");
dispositionScheduleService.addRetainAfterPeriodStep(subcategoryPath, "day|1");
STEP("Create a record folder with 2 records. Complete one of them.");
RecordCategoryChild recFolder = createFolder(subCategory.getId(), getRandomName("recFolder"));
Record elRecord = createElectronicRecord(recFolder.getId(), getRandomName("elRecord"));
Record nonElRecord = createNonElectronicRecord(recFolder.getId(), getRandomName("nonElRecord"));
getRestAPIFactory().getRecordsAPI().completeRecord(nonElRecord.getId());
STEP("Check that the records don't have retention schedule");
Assert.assertFalse(elRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be false for incomplete record");
Assert.assertFalse(nonElRecord.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be false for complete record");
STEP("Check that recFolder inherits subCategory retention schedule");
Assert.assertTrue(recFolder.getProperties().getRecordSearchHasDispositionSchedule(),
"rma:recordSearchHasDispositionSchedule property should be true");
Assert.assertEquals(recFolder.getProperties().getRecordSearchDispositionActionName(),
RETAIN_STEP,
"Disposition action should be retain and not cutoff for the record folder");
}
}

View File

@@ -35,45 +35,28 @@ import static org.alfresco.rest.rm.community.model.user.UserPermissions.PERMISSI
import static org.alfresco.rest.rm.community.model.user.UserRoles.ROLE_RM_POWER_USER;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.IMAGE_FILE;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createElectronicRecordModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createElectronicUnfiledContainerChildModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createNonElectronicRecordModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createElectronicUnfiledContainerChildModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.createNonElectronicUnfiledContainerChildModel;
import static org.alfresco.rest.rm.community.utils.FilePlanComponentsUtil.getFile;
import static org.alfresco.utility.constants.UserRole.SiteCollaborator;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.CREATED;
import static org.springframework.http.HttpStatus.FORBIDDEN;
import static org.springframework.http.HttpStatus.NOT_FOUND;
import static org.springframework.http.HttpStatus.NO_CONTENT;
import static org.springframework.http.HttpStatus.OK;
import org.alfresco.dataprep.CMISUtil;
import org.alfresco.rest.core.JsonBodyGenerator;
import org.alfresco.rest.core.RestResponse;
import org.alfresco.rest.core.v0.BaseAPI.RM_ACTIONS;
import org.alfresco.rest.model.RestNodeBodyMoveCopyModel;
import org.alfresco.rest.model.RestNodeModel;
import org.alfresco.rest.requests.Node;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.record.Record;
import org.alfresco.rest.rm.community.model.record.RecordBodyFile;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.rm.community.model.unfiledcontainer.UnfiledContainerChild;
import org.alfresco.rest.rm.community.requests.gscore.api.RecordCategoryAPI;
import org.alfresco.rest.rm.community.requests.gscore.api.RecordFolderAPI;
import org.alfresco.rest.rm.community.requests.gscore.api.RecordsAPI;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.service.DispositionScheduleService;
import org.alfresco.test.AlfrescoTest;
import org.alfresco.utility.data.RandomData;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FolderModel;
import org.alfresco.utility.model.RepoTestModel;
import org.alfresco.utility.model.SiteModel;
import org.alfresco.utility.model.UserModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;
/**
@@ -84,13 +67,6 @@ import org.testng.annotations.Test;
*/
public class DeleteRecordTests extends BaseRMRestTest
{
@Autowired
private DispositionScheduleService dispositionScheduleService;
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private org.alfresco.rest.v0.RecordsAPI recordsAPI;
/**
* <pre>
* Given an electronic record
@@ -269,170 +245,6 @@ public class DeleteRecordTests extends BaseRMRestTest
assertStatusCode(FORBIDDEN);
}
/**
* <pre>
* Given a record
* And a copy of that record
* When I delete the copy
* Then it is still possible to view the content of the original record
* </pre>
*/
@Test(description = "Deleting copy of record doesn't delete original content")
@AlfrescoTest(jira="MNT-18806")
public void deleteCopyOfRecord()
{
STEP("Create two record categories and folders.");
RecordCategoryChild recordFolderA = createCategoryFolderInFilePlan();
RecordCategoryChild recordFolderB = createCategoryFolderInFilePlan();
STEP("Create a record in folder A and copy it into folder B.");
String recordId = getRestAPIFactory().getRecordFolderAPI()
.createRecord(createElectronicRecordModel(), recordFolderA.getId(), getFile(IMAGE_FILE)).getId();
String copyId = copyNode(recordId, recordFolderB.getId()).getId();
assertStatusCode(CREATED);
STEP("Check that it's possible to load the original content.");
getNodeContent(recordId);
assertStatusCode(OK);
STEP("Delete the copy.");
deleteAndVerify(copyId);
STEP("Check that the original record node and content still exist.");
checkNodeExists(recordId);
getNodeContent(recordId);
}
/**
* <pre>
* Given a file that has a copy
* And the original file is declared as record
* When I delete the original
* Then it is still possible to view the content of the copy
* </pre>
*/
@Test (description = "Deleting record doesn't delete the content for the copies")
@AlfrescoTest (jira = "MNT-20145")
public void deleteOriginOfRecord() throws Exception
{
STEP("Create a file.");
testSite = dataSite.usingAdmin().createPublicRandomSite();
FileModel testFile = dataContent.usingSite(testSite).createContent(CMISUtil.DocumentType.TEXT_PLAIN);
STEP("Create a copy of the file.");
RestNodeModel copyOfTestFile = copyNode(testFile.getNodeRefWithoutVersion(), testSite.getGuid());
STEP("Declare original file as record");
getRestAPIFactory().getFilesAPI().declareAsRecord(testFile.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
STEP("Delete the record.");
deleteAndVerify(testFile.getNodeRefWithoutVersion());
STEP("Check that it's possible to load the copy content.");
getNodeContent(copyOfTestFile.getId());
assertStatusCode(OK);
STEP("Clean up.");
dataSite.deleteSite(testSite);
}
/**
* <pre>
* Given a file that has a copy
* And the original file is declared as record
* And the record becomes part of a disposition schedule with a destroy step
* When the record is destroyed
* Then it is still possible to view the content of the copy
* </pre>
*/
@Test (description = "Destroying record doesn't delete the content for the associated copy")
@AlfrescoTest (jira = "MNT-20145")
public void destroyOfRecord() throws Exception
{
STEP("Create a file.");
testSite = dataSite.usingAdmin().createPublicRandomSite();
FileModel testFile = dataContent.usingSite(testSite).createContent(CMISUtil.DocumentType.TEXT_PLAIN);
FolderModel folderModel = dataContent.usingSite(testSite).createFolder();
STEP("Create a copy of the file.");
RestNodeModel copy = copyNode(testFile.getNodeRefWithoutVersion(), folderModel.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
STEP("Declare the file as record.");
getRestAPIFactory().getFilesAPI().declareAsRecord(testFile.getNodeRefWithoutVersion());
assertStatusCode(CREATED);
STEP("Create a record category with a disposition schedule.");
RecordCategory recordCategory = createRootCategory(getRandomName("Category with disposition"));
dispositionScheduleService.createCategoryRetentionSchedule(recordCategory.getName(), true);
STEP("Add retention schedule cut off and destroy step with immediate period.");
dispositionScheduleService.addCutOffAfterPeriodStep(recordCategory.getName(), "immediately");
dispositionScheduleService.addDestroyWithGhostingAfterPeriodStep(recordCategory.getName(), "immediately");
STEP("Create a record folder and file the record");
RecordCategoryChild recFolder = createFolder(recordCategory.getId(), getRandomName("recFolder"));
RecordBodyFile recordBodyFile = RecordBodyFile.builder().targetParentId(recFolder.getId()).build();
Record recordFiled = getRestAPIFactory().getRecordsAPI().fileRecord(recordBodyFile, testFile.getNodeRefWithoutVersion());
getRestAPIFactory().getRecordsAPI().completeRecord(recordFiled.getId());
assertStatusCode(CREATED);
STEP("Execute the disposition schedule steps.");
rmRolesAndActionsAPI.executeAction(getAdminUser().getUsername(), getAdminUser().getUsername(), recordFiled.getName(),
RM_ACTIONS.CUT_OFF);
rmRolesAndActionsAPI.executeAction(getAdminUser().getUsername(), getAdminUser().getUsername(), recordFiled.getName(),
RM_ACTIONS.DESTROY);
STEP("Check that it's possible to load the copy content.");
getNodeContent(copy.getId());
assertStatusCode(OK);
STEP("Clean up.");
dataSite.deleteSite(testSite);
}
/**
* <pre>
* Given a file that has version declared as record
* When the record is deleted
* Then it is still possible to view the content of the file
* </pre>
*/
@Test (description = "Deleting record made from version doesn't delete the content for the file")
@AlfrescoTest (jira = "MNT-20145")
public void deleteVersionDeclaredAsRecord() throws Exception
{
STEP("Create a file.");
testSite = dataSite.usingAdmin().createPublicRandomSite();
FileModel testFile = dataContent.usingSite(testSite).createContent(CMISUtil.DocumentType.TEXT_PLAIN);
STEP("Declare file version as record.");
recordsAPI.declareDocumentVersionAsRecord(getAdminUser().getUsername(), getAdminUser().getPassword(), testSite.getId(),
testFile.getName());
UnfiledContainerChild unfiledContainerChild = getRestAPIFactory().getUnfiledContainersAPI()
.getUnfiledContainerChildren(UNFILED_RECORDS_CONTAINER_ALIAS)
.getEntries().stream()
.filter(child -> child.getEntry().getName()
.startsWith(testFile.getName().substring(0, testFile.getName().indexOf("."))))
.findFirst()
.get().getEntry();
STEP("Delete the record.");
deleteAndVerify(unfiledContainerChild.getId());
STEP("Check that it's possible to load the file declared version as record.");
getNodeContent(testFile.getNodeRefWithoutVersion());
assertStatusCode(OK);
STEP("Clean up.");
dataSite.deleteSite(testSite);
}
/**
* Utility method to delete a record and verify successful deletion
*
@@ -447,77 +259,8 @@ public class DeleteRecordTests extends BaseRMRestTest
assertStatusCode(NO_CONTENT);
// Try to get deleted record
recordsAPI.getRecord(recordId);
recordsAPI.deleteRecord(recordId);
assertStatusCode(NOT_FOUND);
}
/**
* Copy a node to a folder.
*
* @param nodeId The id of the node to copy.
* @param destinationFolder The id of the folder to copy it to.
* @return The model returned by the copy API.
*/
private RestNodeModel copyNode(String nodeId, String destinationFolder)
{
Node node = getNode(nodeId);
RestNodeBodyMoveCopyModel copyBody = new RestNodeBodyMoveCopyModel();
copyBody.setTargetParentId(destinationFolder);
try
{
return node.copy(copyBody);
}
catch (Exception e)
{
throw new RuntimeException("Problem copying record.", e);
}
}
/**
* Get the content from a node.
*
* @param nodeId
* @return The response containing the node content.
*/
private RestResponse getNodeContent(String nodeId)
{
try
{
return getNode(nodeId).getNodeContent();
}
catch (Exception e)
{
throw new RuntimeException("Failed to load content for node.", e);
}
}
/**
* Check that the given node exists.
*
* @param nodeId The node to check.
*/
private void checkNodeExists(String nodeId)
{
try
{
getNode(nodeId).getNode();
}
catch (Exception e)
{
throw new RuntimeException("Node does not exist.", e);
}
}
/**
* Get the node from a record id.
*
* @param recordId The record to get.
* @return The node object.
*/
private Node getNode(String recordId)
{
RepoTestModel repoTestModel = new RepoTestModel() {};
repoTestModel.setNodeRef(recordId);
return getRestAPIFactory().getNodeAPI(repoTestModel);
}
}

View File

@@ -1,60 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.search;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.v0.SearchAPI;
import org.alfresco.test.AlfrescoTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;
public class ShareLiveSearch extends BaseRMRestTest
{
@Autowired
SearchAPI searchApi;
/**
* Given the RM site has been created When I search for "vital" Then the "Vital Records Due for Review" search
* object should not appear as a link in the quick search results drop down
*/
@Test
@AlfrescoTest(jira = "RM-5882")
public void liveSearchForVitalWord() throws Exception
{
createRMSiteIfNotExists();
List<String> results = searchApi.liveSearchForDocumentsAsUser(getAdminUser().getUsername(), getAdminUser().getPassword(), "vital");
assertTrue(results.isEmpty() || results.stream().noneMatch("Vital Records due for Review"::equalsIgnoreCase),
"Share Live Search should return 0 results when searching for RM Saved Search filter words, but it returned: "
+ Arrays.toString(results.toArray()));
}
}

View File

@@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
<suite name="TestNG AllTestSuite" configfailurepolicy="continue" verbose="1" time-out="300000">
<suite name="TestNG AllTestSuite" verbose="1" time-out="300000">
<test name="restapi">
<packages>
<package name="org.alfresco.rest.rm.community.*"/>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm</artifactId>
<version>2.6.2</version>
<version>2.6.0.1</version>
</parent>
<licenses>
@@ -43,7 +43,7 @@
</dependencyManagement>
<properties>
<alfresco.version>5.2.4</alfresco.version>
<share.version>5.2.4</share.version>
<alfresco.version>5.2.g</alfresco.version>
<share.version>5.2.f</share.version>
</properties>
</project>

View File

@@ -708,10 +708,6 @@
</mandatory-aspects>
</aspect>
<aspect name="rma:savedSearch">
<title>Saved search</title>
</aspect>
<aspect name="rma:vitalRecordDefinition">
<title>Vital Record Definition</title>
<properties>

View File

@@ -34,14 +34,4 @@
<property name="authorityService" ref="authorityService"/>
</bean>
<bean id="rm.savedSearchPatch"
parent="rm.parentModulePatch"
class="org.alfresco.module.org_alfresco_module_rm.patch.v23.RMv23SavedSearchesPatch">
<property name="description" value="Add aspect to saved searches."/>
<property name="fixesToSchema" value="2700"/>
<property name="recordsManagementSearchService" ref="RecordsManagementSearchService"/>
<property name="nodeService" ref="NodeService"/>
</bean>
</beans>

View File

@@ -202,7 +202,7 @@
</bean>
<!-- Map RM exceptions to HTML status codes -->
<bean id="rm.simpleMappingExceptionResolver" abstract="true" parent="simpleMappingExceptionResolverParent">
<bean id="rm.simpleMappingExceptionResolver" abstract="true" parent="simpleMappingExceptionResolver">
<property name="exceptionMappings">
<map merge="true">
<entry key="org.alfresco.service.cmr.attributes.DuplicateAttributeException" value="#{T(org.springframework.extensions.webscripts.Status).STATUS_CONFLICT}" />
@@ -212,7 +212,7 @@
</property>
</bean>
<bean class="org.alfresco.util.BeanExtender">
<property name="beanName" value="simpleMappingExceptionResolverParent"/>
<property name="beanName" value="simpleMappingExceptionResolver"/>
<property name="extendingBeanName" value="rm.simpleMappingExceptionResolver"/>
</bean>
</beans>

View File

@@ -45,7 +45,6 @@
<property name="authenticationUtil" ref="rm.authenticationUtil"/>
<property name="transactionalResourceHelper" ref="rm.transactionalResourceHelper" />
<property name="renditionService" ref="RenditionService" />
<property name="contentService" ref="ContentService" />
</bean>
<!-- Records Management Service Registry -->
@@ -342,7 +341,6 @@
]]>
</value>
</property>
<property name="nodeService" ref="NodeService" />
</bean>
<bean id="RecordsManagementSearchService" class="org.springframework.aop.framework.ProxyFactoryBean">

View File

@@ -1,316 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/**
* NOTE: This file is a copy of the ~/slingshot/search/live-search.lib.js from core repository code and replaces that
* file when built with RM, in order to exclude some RM specific files from live search results. Ideally, any changes
* to the core file should be replicated here.
*/
/**
* Live Search Component
*
* Takes the following object as Input:
* params
* {
* type: search mode type - one of "documents|sites|people"
* term: search terms
* maxResults: maximum results to return
* };
*
* Outputs:
* items - Array of objects containing the search results
*/
const DEFAULT_MAX_RESULTS = 5;
const SITES_SPACE_QNAME_PATH = "/app:company_home/st:sites/";
const SURF_CONFIG_QNAMEPATH = "/cm:surf-config/";
/**
* Returns site information data structure.
* { shortName: siteId, title: title }
*
* Caches the data to avoid repeatedly querying the repository.
*/
var siteDataCache = {};
function getSiteData(siteId) {
if (typeof siteDataCache[siteId] === "object")
{
return siteDataCache[siteId];
}
var site = siteService.getSite(siteId);
var data =
{
shortName : siteId,
title : (site !== null ? site.title : "unknown")
};
siteDataCache[siteId] = data;
return data;
}
/**
* Return the fts-alfresco query template to use.
* The default searches name, title, description, calendar, link, full text and tag fields.
* It is configurable via the .config.xml attached to this webscript.
*/
function getQueryTemplate() {
var t =
[{
field: "keywords",
template: "%(cm:name cm:title cm:description TEXT TAG)"
}],
qt = new XML(config.script)["default-query-template"];
if (qt != null && qt.length() != 0)
{
t[0].template = qt.toString();
}
return t;
}
/**
* Process and return a document item node
*/
function getDocumentItem(container, node) {
// check whether this is a valid folder or a file
var item = null;
if (node.qnamePath.indexOf(SURF_CONFIG_QNAMEPATH) === -1)
{
if (node.isDocument)
{
item =
{
nodeRef: node.nodeRef.toString(),
name: node.name,
title: node.properties["cm:title"],
description: node.properties["cm:description"],
modifiedOn: node.properties["cm:modified"],
modifiedBy: node.properties["cm:modifier"],
createdOn: node.properties["cm:created"],
createdBy: node.properties["cm:creator"],
mimetype: node.mimetype,
size: node.size
};
if (container.siteId !== null)
{
item.site = getSiteData(container.siteId);
item.container = container.containerId;
}
if (node.hasAspect("{http://www.alfresco.org/model/content/1.0}thumbnailModification"))
{
var dates = node.properties["lastThumbnailModification"];
for (var i=0; i<dates.length; i++)
{
if (dates[i].indexOf("doclib") !== -1)
{
item.lastThumbnailModification = dates[i];
break;
}
}
}
}
}
return item;
}
/**
* Splits the qname path to a node.
*
* Returns container meta object containing the following properties:
* siteId
* containerId
*/
function splitQNamePath(node) {
var path = node.qnamePath,
container = {
siteId: null,
containerId: null
};
if (path.match("^"+SITES_SPACE_QNAME_PATH) == SITES_SPACE_QNAME_PATH)
{
var tmp = path.substring(SITES_SPACE_QNAME_PATH.length),
pos = tmp.indexOf('/');
if (pos >= 1)
{
var siteQName = Packages.org.alfresco.util.ISO9075.decode(tmp.split("/")[0]);
var siteId = siteQName.substring(siteQName.indexOf(":") + 1);
tmp = tmp.substring(pos + 1);
pos = tmp.indexOf('/');
if (pos >= 1)
{
// strip container id from the path
var containerId = tmp.substring(0, pos);
containerId = containerId.substring(containerId.indexOf(":") + 1);
container.siteId = siteId;
container.containerId = containerId;
}
}
}
return container;
}
/**
* Dispatch a live search to the appropriate search method for the requested result type.
*/
function liveSearch(params) {
switch (params.type)
{
case "documents":
return getDocResults(params);
break;
case "sites":
return getSiteResults(params);
break;
case "people":
return getPeopleResults(params);
break;
}
}
/**
* Return Document Search results with the given search terms.
*
* "AND" is the default operator unless configured otherwise, OR, AND and NOT are also supported -
* as is any other valid fts-alfresco elements such as "quoted terms" and (bracket terms) and also
* propname:propvalue syntax.
*
* @param params Object containing search parameters - see API description above
*/
function getDocResults(params) {
// ensure a TYPE is specified
var ftsQuery = params.term + ' AND +TYPE:"cm:content"';
// site constraint
if (params.siteId !== null)
{
// use SITE syntax to restrict to specific site
ftsQuery += ' AND SITE:"' + params.siteId + '"';
}
// root node - generally used for overridden Repository root in Share
if (params.rootNode !== null)
{
ftsQuery = 'PATH:"' + params.rootNode.qnamePath + '//*" AND (' + ftsQuery + ')';
}
// main query construction
ftsQuery = '(' + ftsQuery + ') AND -TYPE:"cm:thumbnail" AND -TYPE:"cm:failedThumbnail" AND -TYPE:"cm:rating" AND -TYPE:"fm:post" AND -ASPECT:"sys:hidden" AND -ASPECT:"rma:savedSearch" AND -cm:creator:system';
if (logger.isLoggingEnabled())
logger.log("LiveQuery:\r\n" + ftsQuery);
// get default fts operator from the config
//
// TODO: common search lib - for both live and standard e.g. to get values like this...
//
var operator = "AND";
var cf = new XML(config.script)["default-operator"];
if (cf != null && cf.length != 0)
{
operator = cf.toString();
}
// perform fts-alfresco language query
var queryDef = {
query: ftsQuery,
language: "fts-alfresco",
templates: getQueryTemplate(),
defaultField: "keywords",
defaultOperator: operator,
onerror: "no-results",
page: {
maxItems: params.maxResults,
skipCount: params.startIndex
}
};
var rs = search.queryResultSet(queryDef),
nodes = rs.nodes,
results = [];
if (logger.isLoggingEnabled())
logger.log("Processing resultset of length: " + nodes.length);
for (var i=0, item; i<nodes.length && i<params.maxResults; i++)
{
// For each node we extract the site/container qname path and then
// let the per-container helper function decide what to do.
try
{
item = getDocumentItem(splitQNamePath(nodes[i]), nodes[i]);
if (item !== null)
{
results.push(item);
}
}
catch (e)
{
if (logger.isWarnLoggingEnabled())
{
logger.warn("live-search.lib.js: Skipping node due to exception when processing query result: " + e);
logger.warn("..." + nodes[i].nodeRef);
}
}
}
return buildResults(results, params, rs.meta.hasMore);
}
/**
* Return Site Search results with the given search terms.
*
* @param params Object containing search parameters - see API description above
*/
function getSiteResults(params) {
// Get the list of sites - ensure we use the faster fts based search code path
var t = params.term;
var sites = siteService.findSites(t, params.maxResults);
return buildResults(sites, params);
}
/**
* Return People Search results with the given search terms.
*
* @param params Object containing search parameters - see API description above
*/
function getPeopleResults(params) {
// Get the list of people
var persons = people.getPeople(params.term, params.maxResults);
return buildResults(persons, params);
}
function buildResults(data, params, more) {
return {
totalRecords: data.length,
startIndex: params.startIndex,
hasMoreRecords: more,
items: data
};
}

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-community</artifactId>
<version>2.6.2</version>
<version>2.6.0.1</version>
</parent>
<properties>
@@ -25,8 +25,6 @@
<alfresco.rm.artifactId>alfresco-rm-community-repo</alfresco.rm.artifactId>
<skip.integrationtests>true</skip.integrationtests>
<alfresco.solr.home>${project.build.directory}/solr/home</alfresco.solr.home>
<!-- FIXME: Cannot set it to the Alfresco version as some SQL Mapping files are missing in 5.2-SNAPSHOT. See BDE-843 -->
<alfresco.h2scripts.version>5.1.1</alfresco.h2scripts.version>
<api.explorer.version>1.4</api.explorer.version>
</properties>
@@ -364,7 +362,7 @@
<dependency>
<groupId>${alfresco.groupId}</groupId>
<artifactId>alfresco-repository</artifactId>
<version>${alfresco.h2scripts.version}</version>
<version>${alfresco.version}</version>
<classifier>h2scripts</classifier>
<exclusions>
<exclusion>
@@ -380,20 +378,6 @@
<version>0.9.10</version>
<scope>test</scope>
</dependency>
<!-- swagger parser -->
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-parser</artifactId>
<version>1.0.23</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/com.github.fge/json-schema-validator -->
<dependency>
<groupId>com.github.fge</groupId>
<artifactId>json-schema-validator</artifactId>
<version>2.2.6</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
@@ -615,7 +599,7 @@
<dependency>
<groupId>${alfresco.groupId}</groupId>
<artifactId>alfresco-repository</artifactId>
<version>${alfresco.h2scripts.version}</version>
<version>${alfresco.version}</version>
<classifier>h2scripts</classifier>
<exclusions>
<exclusion>

View File

@@ -485,7 +485,6 @@ public class RMAfterInvocationProvider extends RMSecurityCommon
returnedObject.getResultSetMetaData().getLimitedBy(),
PermissionEvaluationMode.EAGER,
returnedObject.getResultSetMetaData().getSearchParameters()));
filteringResultSet.setNumberFound(returnedObject.getNumberFound());
return filteringResultSet;
}
else
@@ -499,7 +498,6 @@ public class RMAfterInvocationProvider extends RMSecurityCommon
returnedObject.getResultSetMetaData().getLimitedBy(),
PermissionEvaluationMode.EAGER,
returnedObject.getResultSetMetaData().getSearchParameters()));
filteringResultSet.setNumberFound(returnedObject.getNumberFound());
return filteringResultSet;
}
}

View File

@@ -35,7 +35,6 @@ import java.util.Map;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
@@ -52,10 +51,7 @@ public class DispositionScheduleImpl implements DispositionSchedule,
private NodeService nodeService;
private RecordsManagementServiceRegistry services;
private NodeRef dispositionDefinitionNodeRef;
/** authentication helper */
private AuthenticationUtil authenticationUtil;
private List<DispositionActionDefinition> actions;
private Map<String, DispositionActionDefinition> actionsById;
@@ -65,11 +61,6 @@ public class DispositionScheduleImpl implements DispositionSchedule,
/** Map of disposition definitions by disposition action name */
private Map<String, DispositionActionDefinition> actionsByDispositionActionName;
public void setAuthenticationUtil(AuthenticationUtil authenticationUtil)
{
this.authenticationUtil = authenticationUtil;
}
public DispositionScheduleImpl(RecordsManagementServiceRegistry services, NodeService nodeService, NodeRef nodeRef)
{
@@ -109,18 +100,13 @@ public class DispositionScheduleImpl implements DispositionSchedule,
*/
public boolean isRecordLevelDisposition()
{
return authenticationUtil.runAsSystem(new AuthenticationUtil.RunAsWork<Boolean>()
boolean result = false;
Boolean value = (Boolean)this.nodeService.getProperty(this.dispositionDefinitionNodeRef, PROP_RECORD_LEVEL_DISPOSITION);
if (value != null)
{
public Boolean doWork() throws Exception
{
Boolean value = (Boolean)nodeService.getProperty(dispositionDefinitionNodeRef, PROP_RECORD_LEVEL_DISPOSITION);
if (value != null)
{
return value.booleanValue();
}
return null;
}
});
result = value.booleanValue();
}
return result;
}
/**

View File

@@ -348,6 +348,9 @@ public class DispositionServiceImpl extends ServiceBaseImpl
return ds;
}
/**
* This method returns a NodeRef
* Gets the disposition instructions
@@ -380,7 +383,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
NodeRef result = getAssociatedDispositionScheduleImpl(parent);
if (result == null)
{
return getOriginDispositionSchedule(parent);
return null;
}
return new DispositionScheduleImpl(serviceRegistry, nodeService, result);
}

View File

@@ -42,7 +42,6 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.PersonService;
import org.apache.commons.logging.Log;
@@ -123,7 +122,7 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
{
StringBuilder sb = new StringBuilder();
sb.append("TYPE:\"rma:dispositionAction\" AND ");
sb.append("TYPE:\"rma:dispositionAction\" + ");
sb.append("(@rma\\:dispositionAction:(");
boolean bFirst = true;
@@ -165,32 +164,68 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
if (dispositionActions != null && !dispositionActions.isEmpty())
{
boolean hasMore = true;
int skipCount = 0;
while(hasMore)
// execute search
ResultSet results = searchService.query(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE,
SearchService.LANGUAGE_FTS_ALFRESCO, getQuery());
List<NodeRef> resultNodes = results.getNodeRefs();
results.close();
if (logger.isDebugEnabled())
{
SearchParameters params = new SearchParameters();
params.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
params.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
params.setQuery(getQuery());
params.setSkipCount(skipCount);
logger.debug("Processing " + resultNodes.size() + " nodes");
}
// execute search
ResultSet results = searchService.query(params);
List<NodeRef> resultNodes = results.getNodeRefs();
hasMore = results.hasMore();
skipCount += resultNodes.size(); // increase by page size
results.close();
// process search results
for (NodeRef node : resultNodes)
{
final NodeRef currentNode = node;
if (logger.isDebugEnabled())
RetryingTransactionCallback<Boolean> processTranCB = new RetryingTransactionCallback<Boolean>()
{
logger.debug("Processing " + resultNodes.size() + " nodes");
}
public Boolean execute()
{
final String dispAction = (String) nodeService.getProperty(currentNode,
RecordsManagementModel.PROP_DISPOSITION_ACTION);
// process search results
for (NodeRef node : resultNodes)
// Run disposition action
if (dispAction != null && dispositionActions.contains(dispAction))
{
ChildAssociationRef parent = nodeService.getPrimaryParent(currentNode);
if (parent.getTypeQName().equals(RecordsManagementModel.ASSOC_NEXT_DISPOSITION_ACTION))
{
Map<String, Serializable> props = new HashMap<String, Serializable>(1);
props.put(RMDispositionActionExecuterAbstractBase.PARAM_NO_ERROR_CHECK,
Boolean.FALSE);
try
{
// execute disposition action
recordsManagementActionService.executeRecordsManagementAction(
parent.getParentRef(), dispAction, props);
if (logger.isDebugEnabled())
{
logger.debug("Processed action: " + dispAction + "on" + parent);
}
}
catch (AlfrescoRuntimeException exception)
{
if (logger.isDebugEnabled())
{
logger.debug(exception);
}
}
}
}
return Boolean.TRUE;
}
};
// if exists
if (nodeService.exists(currentNode))
{
executeAction(node);
retryingTransactionHelper.doInTransaction(processTranCB);
}
}
}
@@ -206,62 +241,6 @@ public class DispositionLifecycleJobExecuter extends RecordsManagementJobExecute
}
}
/**
* Helper method that executes a disposition action
*
* @param actionNode - the disposition action to execute
*/
private void executeAction(final NodeRef actionNode)
{
RetryingTransactionCallback<Boolean> processTranCB = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute()
{
final String dispAction = (String) nodeService.getProperty(actionNode,
RecordsManagementModel.PROP_DISPOSITION_ACTION);
// Run disposition action
if (dispAction != null && dispositionActions.contains(dispAction))
{
ChildAssociationRef parent = nodeService.getPrimaryParent(actionNode);
if (parent.getTypeQName().equals(RecordsManagementModel.ASSOC_NEXT_DISPOSITION_ACTION))
{
Map<String, Serializable> props = new HashMap<String, Serializable>(1);
props.put(RMDispositionActionExecuterAbstractBase.PARAM_NO_ERROR_CHECK,
Boolean.FALSE);
try
{
// execute disposition action
recordsManagementActionService.executeRecordsManagementAction(
parent.getParentRef(), dispAction, props);
if (logger.isDebugEnabled())
{
logger.debug("Processed action: " + dispAction + "on" + parent);
}
}
catch (AlfrescoRuntimeException exception)
{
if (logger.isDebugEnabled())
{
logger.debug(exception);
}
}
}
}
return Boolean.TRUE;
}
};
// if exists
if (nodeService.exists(actionNode))
{
retryingTransactionHelper.doInTransaction(processTranCB);
}
}
public PersonService getPersonService()
{
return personService;
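The paged query loop introduced above follows the standard SearchParameters/ResultSet pattern. Isolated from the job executer it looks roughly like the sketch below; the class name is illustrative and the SearchService is assumed to be injected, but every call used here (addStore, setLanguage, setQuery, setSkipCount, query, getNodeRefs, hasMore, close) appears in the change itself.
import java.util.ArrayList;
import java.util.List;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;

public class PagedFtsQuerySketch
{
    private SearchService searchService;

    /** Collects every page of results for the given FTS query. */
    public List<NodeRef> queryAllPages(String query)
    {
        List<NodeRef> allNodes = new ArrayList<>();
        boolean hasMore = true;
        int skipCount = 0;

        while (hasMore)
        {
            SearchParameters params = new SearchParameters();
            params.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
            params.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
            params.setQuery(query);
            params.setSkipCount(skipCount);

            ResultSet results = searchService.query(params);
            try
            {
                List<NodeRef> page = results.getNodeRefs();
                allNodes.addAll(page);
                hasMore = results.hasMore();
                skipCount += page.size(); // advance by the size of the page just processed
            }
            finally
            {
                results.close(); // always release the result set
            }
        }
        return allNodes;
    }
}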

View File

@@ -231,6 +231,7 @@ public class PublishUpdatesJobExecuter extends RecordsManagementJobExecuter
List<NodeRef> resultNodes = null;
SearchParameters searchParameters = new SearchParameters();
searchParameters.setQueryConsistency(QueryConsistency.TRANSACTIONAL);
searchParameters.setQuery(query);
searchParameters.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
searchParameters.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);

View File

@@ -280,6 +280,4 @@ public interface RecordsManagementModel extends RecordsManagementCustomModel
// Countable aspect
QName ASPECT_COUNTABLE = QName.createQName(RM_URI, "countable");
QName PROP_COUNT = QName.createQName(RM_URI, "count");
QName ASPECT_SAVED_SEARCH = QName.createQName(RM_URI, "savedSearch");
}

View File

@@ -335,7 +335,11 @@ public class RecordAspect extends AbstractDisposableItem
/**
* On copy complete behaviour for record aspect.
*
* @see org.alfresco.repo.copy.CopyServicePolicies.OnCopyCompletePolicy#onCopyComplete(QName, NodeRef, NodeRef, boolean, Map)
* @param classRef
* @param sourceNodeRef
* @param targetNodeRef
* @param copyToNewNode
* @param copyMap
*/
@Override
@Behaviour
@@ -354,9 +358,6 @@ public class RecordAspect extends AbstractDisposableItem
{
// then remove any extended security from the newly copied record
extendedSecurityService.remove(targetNodeRef);
//create a new content URL for the copy
createNewContentURL(targetNodeRef);
}
}
@@ -377,7 +378,6 @@ public class RecordAspect extends AbstractDisposableItem
/**
* Behaviour to remove the shared link before declaring a record
* and to create a new bin if the node is a copy or has copies
*
* @see org.alfresco.repo.node.NodeServicePolicies.BeforeAddAspectPolicy#beforeAddAspect(org.alfresco.service.cmr.repository.NodeRef,
* org.alfresco.service.namespace.QName)
@@ -397,26 +397,6 @@ public class RecordAspect extends AbstractDisposableItem
quickShareService.unshareContent(sharedId);
}
// if the node has a copy or is a copy of an existing node
if (!nodeService.getTargetAssocs(nodeRef, ContentModel.ASSOC_ORIGINAL).isEmpty() ||
!nodeService.getSourceAssocs(nodeRef, ContentModel.ASSOC_ORIGINAL).isEmpty())
{
//disable versioning and auditing
behaviourFilter.disableBehaviour(ContentModel.ASPECT_AUDITABLE);
behaviourFilter.disableBehaviour(ContentModel.ASPECT_VERSIONABLE);
try
{
//create a new content URL for the copy/original node
createNewContentURL(nodeRef);
}
finally
{
//enable versioning and auditing
behaviourFilter.enableBehaviour(ContentModel.ASPECT_AUDITABLE);
behaviourFilter.enableBehaviour(ContentModel.ASPECT_VERSIONABLE);
}
}
return null;
}
}, AuthenticationUtil.getSystemUserName());
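The try/finally pattern removed above (disable auditing and versioning, rework the content, then always re-enable the behaviours) can be summarised in isolation as follows. This is a minimal sketch: the wrapper class and the Consumer-based signature are illustrative rather than part of the module, while the BehaviourFilter calls are the ones used by the removed code.
import java.util.function.Consumer;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.service.cmr.repository.NodeRef;

public class BehaviourGuardSketch
{
    private BehaviourFilter behaviourFilter;

    /** Runs the given work with auditing and versioning behaviours switched off. */
    public void runWithBehavioursDisabled(NodeRef nodeRef, Consumer<NodeRef> work)
    {
        behaviourFilter.disableBehaviour(ContentModel.ASPECT_AUDITABLE);
        behaviourFilter.disableBehaviour(ContentModel.ASPECT_VERSIONABLE);
        try
        {
            // e.g. createNewContentURL(nodeRef) in the original behaviour
            work.accept(nodeRef);
        }
        finally
        {
            // re-enable the behaviours even if the work throws
            behaviourFilter.enableBehaviour(ContentModel.ASPECT_AUDITABLE);
            behaviourFilter.enableBehaviour(ContentModel.ASPECT_VERSIONABLE);
        }
    }
}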

View File

@@ -34,14 +34,12 @@ import org.alfresco.module.org_alfresco_module_rm.relationship.Relationship;
import org.alfresco.module.org_alfresco_module_rm.relationship.RelationshipService;
import org.alfresco.module.org_alfresco_module_rm.version.RecordableVersionService;
import org.alfresco.repo.node.NodeServicePolicies;
import org.alfresco.repo.policy.Behaviour.NotificationFrequency;
import org.alfresco.repo.policy.annotation.Behaviour;
import org.alfresco.repo.policy.annotation.BehaviourBean;
import org.alfresco.repo.policy.annotation.BehaviourKind;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.service.namespace.QName;
/**
* rmv:versionRecord behaviour bean
@@ -54,15 +52,14 @@ import org.alfresco.service.namespace.QName;
defaultType = "rmv:versionRecord"
)
public class VersionRecordAspect extends BaseBehaviourBean
implements NodeServicePolicies.BeforeAddAspectPolicy,
NodeServicePolicies.BeforeDeleteNodePolicy
implements NodeServicePolicies.BeforeDeleteNodePolicy
{
/** recordable version service */
private RecordableVersionService recordableVersionService;
/** relationship service */
private RelationshipService relationshipService;
/**
* @param recordableVersionService recordable version service
*/
@@ -78,7 +75,7 @@ public class VersionRecordAspect extends BaseBehaviourBean
{
this.relationshipService = relationshipService;
}
/**
* If the record is a version record then delete the associated version entry
*
@@ -132,18 +129,4 @@ public class VersionRecordAspect extends BaseBehaviourBean
});
}
}
/**
* Behaviour to duplicate the bin before declaring a version record
*
* @see org.alfresco.repo.node.NodeServicePolicies.BeforeAddAspectPolicy#beforeAddAspect(org.alfresco.service.cmr.repository.NodeRef,
* org.alfresco.service.namespace.QName)
*/
@Override
@Behaviour(kind = BehaviourKind.CLASS, notificationFrequency = NotificationFrequency.FIRST_EVENT)
public void beforeAddAspect(NodeRef nodeRef, QName qName)
{
//create a new content URL for the version record
createNewContentURL(nodeRef);
}
}

View File

@@ -40,6 +40,7 @@ import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService;
import org.alfresco.module.org_alfresco_module_rm.model.BaseBehaviourBean;
import org.alfresco.module.org_alfresco_module_rm.search.RecordsManagementSearchService;
import org.alfresco.repo.node.NodeServicePolicies;
import org.alfresco.repo.node.integrity.IntegrityException;
import org.alfresco.repo.policy.Behaviour.NotificationFrequency;
import org.alfresco.repo.policy.annotation.Behaviour;
import org.alfresco.repo.policy.annotation.BehaviourBean;
@@ -58,6 +59,7 @@ import org.alfresco.service.cmr.site.SiteVisibility;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ParameterCheck;
import org.alfresco.util.PropertyMap;
import org.springframework.extensions.surf.util.I18NUtil;
import com.google.common.collect.Sets;
@@ -314,8 +316,7 @@ public class RmSiteType extends BaseBehaviourBean
/**
* Add the limitation of creating only one rma:filePlan or one dod:filePlan depending on the type of rm site.
* Let multiple cm:folder type be created under rm site.
*
* Also added the limitation of creating at most two cm:folder children under the rm site.
*
* Other than this, nothing can be created under the rm site nodeRef.
*
@@ -343,6 +344,25 @@ public class RmSiteType extends BaseBehaviourBean
});
}
/**
* Overridden because the rm site needs to allow multiple cm:folder children, but no more than two of them.
* The two permitted folders are created when the rm site is created: one is the Saved Searches folder and the other is the surf-config folder.
* After that, creation of further cm:folder children under the rm site node should not be allowed.
*
*/
@Override
protected void validateNewChildAssociation(NodeRef parent, NodeRef child, List<QName> acceptedUniqueChildType,
List<QName> acceptedMultipleChildType) throws IntegrityException
{
super.validateNewChildAssociation(parent, child, acceptedUniqueChildType, acceptedMultipleChildType);
// check the user is not trying to create more than the 2 folders that are created by default.
if(nodeService.getChildAssocs(parent, Sets.newHashSet(ContentModel.TYPE_FOLDER)).size() > 2)
{
throw new IntegrityException(I18NUtil.getMessage(MULTIPLE_CHILDREN_TYPE_ERROR, ContentModel.TYPE_FOLDER), null);
}
}
@Behaviour
(
kind = BehaviourKind.CLASS,

View File

@@ -27,8 +27,6 @@
package org.alfresco.module.org_alfresco_module_rm.patch.v20;
import static org.alfresco.module.org_alfresco_module_rm.model.rma.type.RmSiteType.DEFAULT_SITE_NAME;
import java.util.List;
import org.alfresco.model.ContentModel;
@@ -53,6 +51,9 @@ import org.springframework.beans.factory.BeanNameAware;
public class RMv2SavedSearchPatch extends ModulePatchComponent
implements BeanNameAware, RecordsManagementModel, DOD5015Model
{
/** RM site id */
private static final String RM_SITE_ID = "rm";
/** Records management search service */
private RecordsManagementSearchService recordsManagementSearchService;
@@ -92,10 +93,10 @@ public class RMv2SavedSearchPatch extends ModulePatchComponent
@Override
protected void executePatch()
{
if (siteService.getSite(DEFAULT_SITE_NAME) != null)
if (siteService.getSite(RM_SITE_ID) != null)
{
// get the saved searches
List<SavedSearchDetails> savedSearches = recordsManagementSearchService.getSavedSearches(DEFAULT_SITE_NAME);
List<SavedSearchDetails> savedSearches = recordsManagementSearchService.getSavedSearches(RM_SITE_ID);
if (LOGGER.isDebugEnabled())
{

View File

@@ -1,88 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.patch.v23;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.ASPECT_SAVED_SEARCH;
import static org.alfresco.module.org_alfresco_module_rm.model.rma.type.RmSiteType.DEFAULT_SITE_NAME;
import org.alfresco.module.org_alfresco_module_rm.patch.AbstractModulePatch;
import org.alfresco.module.org_alfresco_module_rm.search.RecordsManagementSearchService;
import org.alfresco.module.org_alfresco_module_rm.search.SavedSearchDetails;
import org.alfresco.service.cmr.repository.NodeService;
/**
* RM v2.3 patch that adds the saved search aspect.
*
* @author Ross Gale
* @since 2.3
*/
public class RMv23SavedSearchesPatch extends AbstractModulePatch
{
/**
* records management search service
*/
private RecordsManagementSearchService recordsManagementSearchService;
/**
* node service
*/
private NodeService nodeService;
/**
* @param recordsManagementSearchService records management search service
*/
public void setRecordsManagementSearchService(RecordsManagementSearchService recordsManagementSearchService)
{
this.recordsManagementSearchService = recordsManagementSearchService;
}
/**
* @param nodeService node service
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Retrieves all saved searches for the records management site and adds ASPECT_SAVED_SEARCH
*/
@Override
public void applyInternal()
{
for (SavedSearchDetails savedSearchDetails : recordsManagementSearchService.getSavedSearches(DEFAULT_SITE_NAME))
{
if(nodeService.hasAspect(savedSearchDetails.getNodeRef(),ASPECT_SAVED_SEARCH))
{
break;
}
nodeService.addAspect(savedSearchDetails.getNodeRef(), ASPECT_SAVED_SEARCH, null);
}
}
}

View File

@@ -36,6 +36,10 @@ import java.util.Map;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.cmr.repository.datatype.TypeConverter;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.json.JSONArray;
@@ -377,5 +381,20 @@ public class RecordsManagementSearchParameters
{
return includedContainerTypes;
}
// This code needs to be removed once MNT-14795 (Search does not work when RM is installed) has been fixed.
static
{
DefaultTypeConverter.INSTANCE.addConverter(
SearchParameters.class,
String.class,
new TypeConverter.Converter<SearchParameters, String>()
{
public String convert(SearchParameters source)
{
throw new TypeConversionException("Dummy converter! Should throw a TypeConversionException");
}
}
);
}
}
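To illustrate what the static block above does at runtime: once this class has been loaded, converting a SearchParameters object to a String through the default type converter raises the TypeConversionException thrown by the dummy converter. The sketch below is illustrative only; the Class.forName call merely forces the static initialiser to run, and the class and main method are not part of the module.
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.cmr.search.SearchParameters;

public class SearchParametersConversionSketch
{
    public static void main(String[] args) throws ClassNotFoundException
    {
        // Force the static initialiser that registers the dummy converter.
        Class.forName("org.alfresco.module.org_alfresco_module_rm.search.RecordsManagementSearchParameters");

        SearchParameters params = new SearchParameters();
        params.setQuery("TYPE:\"cm:content\"");
        try
        {
            DefaultTypeConverter.INSTANCE.convert(String.class, params);
        }
        catch (TypeConversionException expected)
        {
            // The dummy converter always throws, so SearchParameters cannot be coerced to a String.
            System.out.println("Conversion rejected: " + expected.getMessage());
        }
    }
}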

View File

@@ -43,7 +43,6 @@ import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
@@ -59,8 +58,6 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.extensions.surf.util.I18NUtil;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.ASPECT_SAVED_SEARCH;
/**
* Records management search service implementation
*
@@ -85,12 +82,7 @@ public class RecordsManagementSearchServiceImpl implements RecordsManagementSear
/** Namespace service */
private NamespaceService namespaceService;
/**
* Node service
*/
private NodeService nodeService;
/** List of report details */
/** List of report details */
private List<ReportDetails> reports = new ArrayList<ReportDetails>(13);
/**
@@ -125,15 +117,7 @@ public class RecordsManagementSearchServiceImpl implements RecordsManagementSear
this.namespaceService = namespaceService;
}
/**
* @param nodeService Node service
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
/**
* @param reportsJSON
*/
public void setReportsJSON(String reportsJSON)
@@ -542,7 +526,7 @@ public class RecordsManagementSearchServiceImpl implements RecordsManagementSear
}
}, AuthenticationUtil.getSystemUserName());
}
nodeService.addAspect(searchNode, ASPECT_SAVED_SEARCH, null);
// Write the JSON content to search node
final NodeRef writableSearchNode = searchNode;
AuthenticationUtil.runAs(new RunAsWork<Void>()

View File

@@ -31,7 +31,6 @@ import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanComponentKind;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
import org.alfresco.module.org_alfresco_module_rm.hold.HoldService;
@@ -39,9 +38,6 @@ import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.rendition.RenditionService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.namespace.QName;
@@ -75,12 +71,9 @@ public class ServiceBaseImpl implements RecordsManagementModel, ApplicationConte
/** authentication helper */
protected AuthenticationUtil authenticationUtil;
/** transactional resource helper */
protected TransactionalResourceHelper transactionalResourceHelper;
/** Content service */
protected ContentService contentService;
protected TransactionalResourceHelper transactionalResourceHelper;
/**
* @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
@@ -131,16 +124,6 @@ public class ServiceBaseImpl implements RecordsManagementModel, ApplicationConte
this.transactionalResourceHelper = transactionalResourceHelper;
}
/**
* Set the content service
*
* @param contentService content service
*/
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
/**
* Helper to get internal node service.
* <p>
@@ -554,30 +537,4 @@ public class ServiceBaseImpl implements RecordsManagementModel, ApplicationConte
result.add(nodeService.getType(nodeRef));
return result;
}
/**
* Helper to update the given content property for the node
*
* @param nodeRef the node
* @param contentProperty the property to be updated
*/
protected void updateContentProperty(NodeRef nodeRef, QName contentProperty)
{
ContentReader reader = contentService.getReader(nodeRef, contentProperty);
if (reader != null)
{
ContentWriter writer = contentService.getWriter(nodeRef, contentProperty, true);
writer.putContent(reader);
}
}
/**
* Helper to create a new content URL for the node
*
* @param nodeRef the node
*/
protected void createNewContentURL(NodeRef nodeRef)
{
updateContentProperty(nodeRef, ContentModel.PROP_CONTENT);
}
}
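For readers following the behaviour changes above, the removed helpers boil down to the read-then-rewrite pattern below, which writes the existing content back through a new writer so the node ends up with its own content URL. This is a minimal sketch assuming an injected ContentService; the class name is illustrative, while the ContentService calls are the ones used by the removed code.
import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;

public class NewContentUrlSketch
{
    private ContentService contentService;

    /** Rewrites cm:content so the node gets its own content URL. */
    public void createNewContentUrl(NodeRef nodeRef)
    {
        updateContentProperty(nodeRef, ContentModel.PROP_CONTENT);
    }

    private void updateContentProperty(NodeRef nodeRef, QName contentProperty)
    {
        ContentReader reader = contentService.getReader(nodeRef, contentProperty);
        if (reader != null)
        {
            // getWriter(..., true) updates the property with the newly written content
            ContentWriter writer = contentService.getWriter(nodeRef, contentProperty, true);
            writer.putContent(reader);
        }
    }
}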

View File

@@ -38,14 +38,12 @@ import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import net.sf.acegisecurity.vote.AccessDecisionVoter;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -111,6 +109,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.social.InternalServerErrorException;
import net.sf.acegisecurity.vote.AccessDecisionVoter;
/**
* Utility class that handles common api endpoint tasks
*
@@ -914,11 +914,7 @@ public class FilePlanComponentsApiUtils
if (permissionService.hasPermission(childNodeRef, PermissionService.READ) == AccessStatus.ALLOWED)
{
Serializable nameProp = nodeService.getProperty(childNodeRef, ContentModel.PROP_NAME);
String type = nodeService.getType(childNodeRef).toPrefixString();
Set<QName> aspects = nodeService.getAspects(childNodeRef);
List<String> aspectNames = nodes.mapFromNodeAspects(aspects, Collections.emptyList(), Collections.emptyList());
pathElements.add(0, new ElementInfo(childNodeRef.getId(), nameProp.toString(), type, aspectNames));
pathElements.add(0, new ElementInfo(childNodeRef.getId(), nameProp.toString()));
}
else
{

View File

@@ -1,111 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.test.integration.issue;
import static org.alfresco.repo.site.SiteServiceImpl.getSiteContainer;
import static org.alfresco.util.GUID.generate;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseRMTestCase;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteVisibility;
/**
* Test class for MNT 19114, wiki page cannot be created under RM site
*/
public class MNT19114Test extends BaseRMTestCase
{
public static final String PARENT_NODE = "RMSite";
public static final String DOCUMENT_LIBRARY_FOLDER_TYPE = "documentLibrary";
public static final String SURF_CONFIG_FOLDER_TYPE = "surfConfigFolder";
public static final String WIKI_PAGE_FOLDER_TYPE = "wikiPage";
@Override
protected boolean isRMSiteTest()
{
return true;
}
/**
* Given a RM site and two folder type children
* When creating a third folder type child as a Wiki page
* The page will be created and no exception will be thrown.
*/
public void testCreateWikiPageInRmSite() throws Exception
{
doBehaviourDrivenTest(new BehaviourDrivenTest()
{
NodeRef wikiPage;
public void given()
{
// Creating a Records Management site
siteService.createSite("rmSite", PARENT_NODE, generate(), generate(), SiteVisibility.PUBLIC, TYPE_RM_SITE);
// Adding two immediate folder type children
getSiteContainer(
PARENT_NODE,
DOCUMENT_LIBRARY_FOLDER_TYPE,
true,
siteService,
transactionService,
taggingService);
getSiteContainer(
PARENT_NODE,
SURF_CONFIG_FOLDER_TYPE,
true,
siteService,
transactionService,
taggingService);
}
public void when() throws Exception
{
wikiPage = getSiteContainer(
PARENT_NODE,
WIKI_PAGE_FOLDER_TYPE,
true,
siteService,
transactionService,
taggingService);
}
public void then() throws Exception
{
// Check if the new folder type wiki page has been created
assertEquals(true, nodeService.exists(wikiPage));
}
public void after()
{
siteService.deleteSite(PARENT_NODE);
}
});
}
}

View File

@@ -238,16 +238,7 @@ public abstract class RecordableVersionsBaseTest extends BaseRMTestCase implemen
if (frozenProperties.containsKey(beforePropertyName))
{
Serializable frozenValue = frozenProperties.get(beforePropertyName);
if(beforePropertyName.equals(ContentModel.PROP_CONTENT))
{
assertTrue("Content property value should be different.",
entry.getValue() != frozenValue);
}
else
{
assertEquals("Frozen property " + beforePropertyName.getLocalName() + " value is incorrect.",
entry.getValue(), frozenValue);
}
assertEquals("Frozen property " + beforePropertyName.getLocalName() + " value is incorrect.", entry.getValue(), frozenValue);
cloneFrozenProperties.remove(beforePropertyName);
}
else if (!PROP_FILE_PLAN.equals(beforePropertyName) &&

View File

@@ -29,19 +29,17 @@ package org.alfresco.module.org_alfresco_module_rm.job;
import static org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock.generateQName;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.contains;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -50,15 +48,13 @@ import org.alfresco.module.org_alfresco_module_rm.test.util.BaseUnitTest;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* Disposition lifecycle job execution unit test.
@@ -96,8 +92,7 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
executer.setDispositionActions(dispositionActions);
// setup interactions
doReturn(mockedResultSet).when(mockedSearchService).query(any(SearchParameters.class));
when(mockedResultSet.hasMore()).thenReturn(false);
doReturn(mockedResultSet).when(mockedSearchService).query(eq(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE), eq(SearchService.LANGUAGE_FTS_ALFRESCO), anyString());
}
/**
@@ -105,9 +100,7 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
*/
private void verifyQuery()
{
ArgumentCaptor<SearchParameters> paramsCaptor = ArgumentCaptor.forClass(SearchParameters.class);
verify(mockedSearchService, times(1)).query(paramsCaptor.capture());
assertTrue(paramsCaptor.getValue().getQuery().contains(QUERY));
verify(mockedSearchService, times(1)).query(eq(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE), eq(SearchService.LANGUAGE_FTS_ALFRESCO), contains(QUERY));
verify(mockedResultSet, times(1)).getNodeRefs();
verify(mockedResultSet, times(1)).close();
}
@@ -256,64 +249,7 @@ public class DispositionLifecycleJobExecuterUnitTest extends BaseUnitTest
{
String actual = executer.getQuery();
String expected = "TYPE:\"rma:dispositionAction\" AND " +
"(@rma\\:dispositionAction:(\"cutoff\" OR \"retain\")) " +
"AND ISUNSET:\"rma:dispositionActionCompletedAt\" " +
"AND ( @rma\\:dispositionEventsEligible:true OR @rma\\:dispositionAsOf:[MIN TO NOW] ) ";
String expected = "TYPE:\"rma:dispositionAction\" + (@rma\\:dispositionAction:(\"cutoff\" OR \"retain\")) AND ISUNSET:\"rma:dispositionActionCompletedAt\" AND ( @rma\\:dispositionEventsEligible:true OR @rma\\:dispositionAsOf:[MIN TO NOW] ) ";
assertEquals(expected, actual);
}
/**
* Given the maximum page of elements for search service is 2
* and search service finds more than one page of elements
* When the job executer runs
* Then the executer retrieves both pages and iterates all elements
*/
@Test
public void testPagination()
{
final NodeRef node1 = generateNodeRef();
final NodeRef node2 = generateNodeRef();
final NodeRef node3 = generateNodeRef();
final NodeRef node4 = generateNodeRef();
// mock the search service to return the right page
when(mockedSearchService.query(any(SearchParameters.class))).thenAnswer(
new Answer<ResultSet>()
{
@Override
public ResultSet answer(InvocationOnMock invocation)
{
SearchParameters params = invocation.getArgumentAt(0, SearchParameters.class);
if (params.getSkipCount() == 0)
{
// mock first page
ResultSet result1 = mock(ResultSet.class);
when(result1.getNodeRefs()).thenReturn(Arrays.asList(node1, node2));
when(result1.hasMore()).thenReturn(true);
return result1;
}
else if (params.getSkipCount() == 2)
{
// mock second page
ResultSet result2 = mock(ResultSet.class);
when(result2.getNodeRefs()).thenReturn(Arrays.asList(node3, node4));
when(result2.hasMore()).thenReturn(false);
return result2;
}
throw new IndexOutOfBoundsException("Pagination did not stop after the second page!");
}
});
// call the service
executer.executeImpl();
// check the loop iterated through all the elements
verify(mockedNodeService).exists(node1);
verify(mockedNodeService).exists(node2);
verify(mockedNodeService).exists(node3);
verify(mockedNodeService).exists(node4);
verify(mockedSearchService, times(2)).query(any(SearchParameters.class));
}
}

View File

@@ -1,176 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.model.rma.aspect;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.ASPECT_RECORD;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedSecurityService;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
/**
* Unit tests for the {@link RecordAspect}.
*
* @author Claudia Agache
*/
public class RecordAspectUnitTest
{
private static final NodeRef NODE_REF = new NodeRef("node://Ref/");
private static final NodeRef COPY_REF = new NodeRef("node://Copy/");
private static final AssociationRef SOURCE_ASSOC_REF = new AssociationRef(COPY_REF, ContentModel.ASSOC_ORIGINAL,
NODE_REF);
private static final AssociationRef TARGET_ASSOC_REF = new AssociationRef(NODE_REF, ContentModel.ASSOC_ORIGINAL,
COPY_REF);
@InjectMocks
private RecordAspect recordAspect;
@Mock
private NodeService mockNodeService;
@Mock
private BehaviourFilter mockBehaviorFilter;
@Mock
private ContentService mockContentService;
@Mock
private ContentReader mockContentReader;
@Mock
private ContentWriter mockContentWriter;
@Mock
private ExtendedSecurityService mockExtendedSecurityService;
@Before
public void setUp()
{
initMocks(this);
}
/** Check that the bin is duplicated before adding the aspect if the file has a copy. */
@Test
public void testDuplicateBinBeforeAddingAspectForFileWithCopy()
{
when(mockNodeService.getSourceAssocs(NODE_REF, ContentModel.ASSOC_ORIGINAL)).thenReturn(asList(SOURCE_ASSOC_REF));
when(mockContentService.getReader(NODE_REF, ContentModel.PROP_CONTENT)).thenReturn(mockContentReader);
when(mockContentService.getWriter(NODE_REF, ContentModel.PROP_CONTENT, true)).thenReturn(mockContentWriter);
recordAspect.beforeAddAspect(NODE_REF, ASPECT_RECORD);
verifyBeforeAddAspectMethodsInvocations(1);
}
/** Check that the bin is duplicated before adding the aspect if the file is a copy. */
@Test
public void testDuplicateBinBeforeAddingAspectForCopy()
{
when(mockNodeService.getTargetAssocs(NODE_REF, ContentModel.ASSOC_ORIGINAL)).thenReturn(asList(TARGET_ASSOC_REF));
when(mockContentService.getReader(NODE_REF, ContentModel.PROP_CONTENT)).thenReturn(mockContentReader);
when(mockContentService.getWriter(NODE_REF, ContentModel.PROP_CONTENT, true)).thenReturn(mockContentWriter);
recordAspect.beforeAddAspect(NODE_REF, ASPECT_RECORD);
verifyBeforeAddAspectMethodsInvocations(1);
}
/** Check that no content bin is created if the file does not have content. */
@Test
public void testBeforeAddAspectOnFileWithNoContent()
{
when(mockNodeService.getTargetAssocs(NODE_REF, ContentModel.ASSOC_ORIGINAL)).thenReturn(asList(TARGET_ASSOC_REF));
when(mockContentService.getReader(NODE_REF, ContentModel.PROP_CONTENT)).thenReturn(null);
recordAspect.beforeAddAspect(NODE_REF, ASPECT_RECORD);
verify(mockBehaviorFilter, times(1)).disableBehaviour(eq(ContentModel.ASPECT_AUDITABLE));
verify(mockBehaviorFilter, times(1)).disableBehaviour(eq(ContentModel.ASPECT_VERSIONABLE));
verify(mockContentService, times(1)).getReader(NODE_REF, ContentModel.PROP_CONTENT);
verify(mockContentService, never()).getWriter(NODE_REF, ContentModel.PROP_CONTENT, true);
verify(mockBehaviorFilter, times(1)).enableBehaviour(eq(ContentModel.ASPECT_AUDITABLE));
verify(mockBehaviorFilter, times(1)).enableBehaviour(eq(ContentModel.ASPECT_VERSIONABLE));
}
/** Check that the bin is not duplicated before adding the aspect if the node has no copies. */
@Test
public void testNotDuplicateBinForFileWithNoCopies()
{
when(mockNodeService.getSourceAssocs(NODE_REF, ContentModel.ASSOC_ORIGINAL)).thenReturn(emptyList());
when(mockNodeService.getTargetAssocs(NODE_REF, ContentModel.ASSOC_ORIGINAL)).thenReturn(emptyList());
recordAspect.beforeAddAspect(NODE_REF, ASPECT_RECORD);
verifyBeforeAddAspectMethodsInvocations(0);
}
/** Check that the bin is duplicated when copying a record. */
@Test
public void testDuplicateBinWhenCopyingRecord()
{
when(mockNodeService.exists(COPY_REF)).thenReturn(true);
when(mockNodeService.hasAspect(COPY_REF, ASPECT_RECORD)).thenReturn(true);
when(mockContentService.getReader(COPY_REF, ContentModel.PROP_CONTENT)).thenReturn(mockContentReader);
when(mockContentService.getWriter(COPY_REF, ContentModel.PROP_CONTENT, true)).thenReturn(mockContentWriter);
recordAspect.onCopyComplete(null, NODE_REF, COPY_REF, true, null);
verify(mockExtendedSecurityService, times(1)).remove(COPY_REF);
verify(mockContentService, times(1)).getReader(COPY_REF, ContentModel.PROP_CONTENT);
verify(mockContentService, times(1)).getWriter(COPY_REF, ContentModel.PROP_CONTENT, true);
verify(mockContentWriter, times(1)).putContent(mockContentReader);
}
/**
* Helper to verify beforeAddAspect methods invocations
*
* @param wantedNumberOfInvocations wanted number of invocations for each method
*/
private void verifyBeforeAddAspectMethodsInvocations(int wantedNumberOfInvocations)
{
verify(mockBehaviorFilter, times(wantedNumberOfInvocations)).disableBehaviour(eq(ContentModel.ASPECT_AUDITABLE));
verify(mockBehaviorFilter, times(wantedNumberOfInvocations)).disableBehaviour(eq(ContentModel.ASPECT_VERSIONABLE));
verify(mockContentService, times(wantedNumberOfInvocations)).getReader(NODE_REF, ContentModel.PROP_CONTENT);
verify(mockContentService, times(wantedNumberOfInvocations)).getWriter(NODE_REF, ContentModel.PROP_CONTENT, true);
verify(mockContentWriter, times(wantedNumberOfInvocations)).putContent(mockContentReader);
verify(mockBehaviorFilter, times(wantedNumberOfInvocations)).enableBehaviour(eq(ContentModel.ASPECT_AUDITABLE));
verify(mockBehaviorFilter, times(wantedNumberOfInvocations)).enableBehaviour(eq(ContentModel.ASPECT_VERSIONABLE));
}
}

View File

@@ -32,8 +32,6 @@ import static org.mockito.Mockito.when;
import java.util.ArrayList;
import com.google.common.collect.Sets;
import org.alfresco.module.org_alfresco_module_rm.dod5015.DOD5015Model;
import org.alfresco.module.org_alfresco_module_rm.test.util.AlfMock;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseUnitTest;
@@ -51,6 +49,8 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import com.google.common.collect.Sets;
/**
* Unit test for RmSiteType
*
@@ -151,7 +151,7 @@ public class RmSiteTypeUnitTest extends BaseUnitTest implements DOD5015Model
* Given that we try to add more than two cm:folder to rm site,
* Then IntegrityException is thrown.
*/
@Test
@Test(expected = IntegrityException.class)
public void testAddMoreThanTwhoFolderTypeToRmSite()
{
NodeRef rmSiteNodeRef = generateNodeRef(TYPE_RM_SITE, true);

View File

@@ -1,91 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.patch.v23;
import static java.util.Arrays.asList;
import static org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel.ASPECT_SAVED_SEARCH;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.List;
import org.alfresco.module.org_alfresco_module_rm.search.RecordsManagementSearchServiceImpl;
import org.alfresco.module.org_alfresco_module_rm.search.SavedSearchDetails;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
* Unit test for the RM v2.3 saved searches patch (patch.v23)
*
* @author Ross Gale
* @since 2.3
*/
public class RMv23SavedSearchesPatchUnitTest
{
@Mock
private NodeService nodeService;
@Mock
private RecordsManagementSearchServiceImpl recordsManagementSearchService;
@Mock
private SavedSearchDetails mockSavedSearchDetails1, mockSavedSearchDetails2;
@InjectMocks
private RMv23SavedSearchesPatch patch;
/**
* Given that I am upgrading an existing repository to v2.3
* When I execute the patch
* Then any existing rm saved searches will have the saved search aspect applied
*/
@Test
public void executePatch()
{
MockitoAnnotations.initMocks(this);
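// two existing saved searches are returned for the "rm" site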
NodeRef noderef1 = new NodeRef("foo://123/456");
NodeRef noderef2 = new NodeRef("bar://123/456");
List<SavedSearchDetails> searches = asList(mockSavedSearchDetails1, mockSavedSearchDetails2);
when(mockSavedSearchDetails1.getNodeRef()).thenReturn(noderef1);
when(mockSavedSearchDetails2.getNodeRef()).thenReturn(noderef2);
when(recordsManagementSearchService.getSavedSearches("rm")).thenReturn(searches);
// execute patch
patch.applyInternal();
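// both saved searches should have the saved search aspect applied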
verify(nodeService, times(1)).addAspect(noderef1, ASPECT_SAVED_SEARCH, null);
verify(nodeService, times(1)).addAspect(noderef2, ASPECT_SAVED_SEARCH, null);
}
}

View File

@@ -1,160 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.module.org_alfresco_module_rm.test.util;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.fasterxml.jackson.dataformat.yaml.snakeyaml.parser.ParserException;
import com.github.fge.jackson.JsonLoader;
import com.github.fge.jsonschema.core.exceptions.ProcessingException;
import com.github.fge.jsonschema.core.report.ProcessingReport;
import com.github.fge.jsonschema.main.JsonSchema;
import com.github.fge.jsonschema.main.JsonSchemaFactory;
import io.swagger.models.Swagger;
import io.swagger.parser.SwaggerParser;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.WildcardFileFilter;
/**
* Base class for unit tests for Yaml files.
*
* @author Sara Aspery
* @since 2.6
*/
public class BaseYamlUnitTest
{
private static final String SWAGGER_2_SCHEMA_LOCATION = "/rest/schema.json";
private static final String OPEN_API_SPECIFICATION = "2.0";
/**
* Helper method to obtain path names for all yaml files found on the given path
*/
protected Set<String> getYamlFilesList(String pathName) throws Exception
{
Set<String> yamlFilePathNames = new HashSet<>();
File directory = new File(pathName);
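// collect all *.yaml files directly under the given directory (subdirectories are not searched)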
Collection<File> yamlFiles = FileUtils.listFiles(directory, new WildcardFileFilter("*.yaml"), null);
for (File file : yamlFiles)
{
yamlFilePathNames.add(file.getCanonicalPath());
}
return yamlFilePathNames;
}
/**
* Helper method to validate that all the given yaml files are valid, readable Swagger definitions
*/
protected void validateYamlFiles(final Set<String> yamlFileNames) throws ProcessingException, IOException
{
assertFalse("Expected at least 1 yaml file to validate", yamlFileNames.isEmpty());
final JsonSchema swaggerSchema = getSwaggerSchema(SWAGGER_2_SCHEMA_LOCATION);
assertNotNull("Failed to obtain the Swagger schema", swaggerSchema);
for (String yamlFilePath : yamlFileNames)
{
try
{
// check the yaml file is valid against Swagger JSON schema
assertTrue("Yaml file is not valid Swagger " + OPEN_API_SPECIFICATION + ": " + yamlFilePath,
validateYamlFile(yamlFilePath, swaggerSchema));
// check can read the swagger object to obtain the swagger version
Swagger swagger = new SwaggerParser().read(yamlFilePath);
assertEquals("Failed to obtain Swagger version from yaml file " + yamlFilePath,
swagger.getSwagger(), OPEN_API_SPECIFICATION);
}
catch (ParserException ex)
{
// ensure the yaml filename is included in the message
String context = String.format("%s: %n%s", yamlFilePath, ex.getContext());
throw new ParserException(context, ex.getContextMark(), ex.getProblem(), ex.getProblemMark());
}
}
}
/**
* Helper method to read in the Swagger JSON schema file
*/
private JsonSchema getSwaggerSchema(final String schemaLocation) throws IOException, ProcessingException
{
JsonSchema swaggerSchema = null;
// try-with-resources ensures the classpath stream is closed; null is returned if the resource is missing
try (InputStream in = this.getClass().getResourceAsStream(schemaLocation))
{
if (in != null)
{
final String swaggerSchemaAsString = IOUtils.toString(in);
final JsonNode schemaNode = JsonLoader.fromString(swaggerSchemaAsString);
final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
swaggerSchema = factory.getJsonSchema(schemaNode);
}
}
return swaggerSchema;
}
/**
* Helper method to validate Yaml file against JSON schema
*/
private boolean validateYamlFile(final String yamlFilePath, final JsonSchema jsonSchema) throws IOException, ProcessingException
{
// Get yaml file as a string
final String yaml = new String(Files.readAllBytes(Paths.get(yamlFilePath)));
// Convert yaml string to JSON string
final ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
final Object obj = yamlReader.readValue(yaml, Object.class);
final ObjectMapper jsonWriter = new ObjectMapper();
final String yamlAsJsonString = jsonWriter.writeValueAsString(obj);
return validateJSON(yamlAsJsonString, jsonSchema);
}
/**
* Helper method to validate JSON string against JSON schema
*/
private boolean validateJSON(final String jsonData, final JsonSchema schema) throws IOException, ProcessingException
{
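// the processing report is successful only if the JSON conforms to the schema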
final JsonNode dataNode = JsonLoader.fromString(jsonData);
final ProcessingReport report = schema.validate(dataNode);
return report.isSuccess();
}
}

View File

@@ -1,50 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rm.rest.api.impl;
import org.alfresco.module.org_alfresco_module_rm.test.util.BaseYamlUnitTest;
import org.junit.Test;
/**
* Unit Test class for RM Yaml file validation.
*
* @author Sara Aspery
* @since 2.6
*
*/
public class RMYamlUnitTest extends BaseYamlUnitTest
{
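// location of the community REST API yaml definitions, relative to this module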
private static final String RM_COMMUNITY_YAML_FILES_PATH = "../rm-community-rest-api-explorer/src/main/webapp/definitions";
@Test
public void validateYamlFile() throws Exception
{
validateYamlFiles(getYamlFilesList(RM_COMMUNITY_YAML_FILES_PATH));
}
}

View File

@@ -7,11 +7,11 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-rm-community</artifactId>
<version>2.6.2</version>
<version>2.6.0.1</version>
</parent>
<properties>
<alfresco.api.explorer.version>5.2.2</alfresco.api.explorer.version>
<alfresco.api.explorer.version>5.2.0.1</alfresco.api.explorer.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>