Merge release/V2.3 into RM-3386_ManauallySetDate.

Tom Page
2016-10-19 08:31:13 +01:00
15 changed files with 943 additions and 650 deletions

View File

@@ -38,6 +38,9 @@ public class EditDispositionActionAsOfDateAction extends RMActionExecuterAbstrac
private static final String MSG_VALID_DATE_DISP_ASOF = "rm.action.valid-date-disp-asof";
private static final String MSG_DISP_ASOF_LIFECYCLE_APPLIED = "rm.action.disp-asof-lifecycle-applied";
/** Action name */
public static final String NAME = "editDispositionActionAsOfDate";
/** Action parameters */
public static final String PARAM_AS_OF_DATE = "asOfDate";
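For context, a hypothetical caller-side sketch (not part of this commit) of how the action declared above could be invoked through Alfresco's generic ActionService, passing the new "asOfDate" parameter. The helper class, its method, and the choice of target node are illustrative assumptions only.

import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.service.cmr.repository.NodeRef;

// Illustrative sketch only: runs the "editDispositionActionAsOfDate" action with the
// "asOfDate" parameter defined above. Which node the action should be executed against
// (record, record folder or next-disposition-action node) is an assumption of this sketch.
class EditAsOfDateSketch
{
    static void setAsOfDate(ActionService actionService, NodeRef target, Date asOfDate)
    {
        Map<String, Serializable> params = new HashMap<String, Serializable>();
        params.put("asOfDate", asOfDate);                              // PARAM_AS_OF_DATE
        Action action = actionService.createAction("editDispositionActionAsOfDate", params); // NAME
        actionService.executeAction(action, target);
    }
}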

View File

@@ -1,199 +0,0 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.org_alfresco_module_rm.disposition;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This class offers the default implementation of a strategy for selection of
* disposition schedule for a record when there is more than one which is applicable.
* An example of where this strategy might be used would be in the case of a record
* which was multiply filed.
*
* @author neilm
*/
public class DispositionSelectionStrategy implements RecordsManagementModel
{
/** Logger */
private static Log logger = LogFactory.getLog(DispositionSelectionStrategy.class);
/** Disposition service */
private DispositionService dispositionService;
/**
* Set the disposition service
*
* @param dispositionService disposition service
*/
public void setDispositionService(DispositionService dispositionService)
{
this.dispositionService = dispositionService;
}
/**
* Select the disposition schedule to use given there is more than one
*
* @param recordFolders
* @return
*/
public NodeRef selectDispositionScheduleFrom(List<NodeRef> recordFolders)
{
if (recordFolders == null || recordFolders.isEmpty())
{
return null;
}
else
{
// 46 CHAPTER 2
// Records assigned more than 1 disposition must be retained and linked to the record folder (category) with the longest
// retention period.
// Assumption: an event-based disposition action has a longer retention
// period than a time-based one - as we cannot know when an event will occur
// TODO Automatic events?
NodeRef recordFolder = null;
if (recordFolders.size() == 1)
{
recordFolder = recordFolders.get(0);
}
else
{
SortedSet<NodeRef> sortedFolders = new TreeSet<NodeRef>(new DispositionableNodeRefComparator());
sortedFolders.addAll(recordFolders);
recordFolder = sortedFolders.first();
}
DispositionSchedule dispSchedule = dispositionService.getDispositionSchedule(recordFolder);
if (logger.isDebugEnabled())
{
logger.debug("Selected disposition schedule: " + dispSchedule);
}
NodeRef result = null;
if (dispSchedule != null)
{
result = dispSchedule.getNodeRef();
}
return result;
}
}
/**
* This class defines a natural comparison order between NodeRefs that have
* the dispositionLifecycle aspect applied.
* This order has the following meaning: NodeRefs with a 'lesser' value are considered
* to have a shorter retention period, although the actual retention period may
* not be straightforwardly determined in all cases.
*/
class DispositionableNodeRefComparator implements Comparator<NodeRef>
{
public int compare(final NodeRef f1, final NodeRef f2)
{
// Run as admin user
return AuthenticationUtil.runAs(new RunAsWork<Integer>()
{
public Integer doWork()
{
return compareImpl(f1, f2);
}
}, AuthenticationUtil.getAdminUserName());
}
private int compareImpl(NodeRef f1, NodeRef f2)
{
// quick check to see if the node references are the same
if (f1.equals(f2))
{
return 0;
}
// get the disposition schedules for the folders
DispositionSchedule ds1 = dispositionService.getDispositionSchedule(f1);
DispositionSchedule ds2 = dispositionService.getDispositionSchedule(f2);
// make sure each folder has a disposition schedule
if (ds1 == null && ds2 != null)
{
return 1;
}
else if (ds1 != null && ds2 == null)
{
return -1;
}
else if (ds1 == null && ds2 == null)
{
return 0;
}
// TODO this won't work correctly if we are trying to compare schedules that are record based!!
DispositionAction da1 = dispositionService.getNextDispositionAction(f1);
DispositionAction da2 = dispositionService.getNextDispositionAction(f2);
if (da1 != null && da2 != null)
{
Date asOfDate1 = da1.getAsOfDate();
Date asOfDate2 = da2.getAsOfDate();
// If both record(Folder)s have asOfDates, then use these to compare
if (asOfDate1 != null && asOfDate2 != null)
{
return asOfDate1.compareTo(asOfDate2);
}
// If one has a date and the other doesn't, the one with the date is "less".
// (Defined date is 'shorter' than undefined date as an undefined date means it may be retained forever - theoretically)
else if (asOfDate1 != null || asOfDate2 != null)
{
return asOfDate1 == null ? +1 : -1;
}
else
{
// Neither has an asOfDate. (Somewhat arbitrarily) we'll use the number of events to compare now.
DispositionActionDefinition dad1 = da1.getDispositionActionDefinition();
DispositionActionDefinition dad2 = da2.getDispositionActionDefinition();
int eventsCount1 = 0;
int eventsCount2 = 0;
if (dad1 != null)
{
eventsCount1 = dad1.getEvents().size();
}
if (dad2 != null)
{
eventsCount2 = dad2.getEvents().size();
}
return Integer.valueOf(eventsCount1).compareTo(eventsCount2);
}
}
return 0;
}
}
}
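The file above is removed by this commit; next-action selection now happens in DispositionServiceImpl (see below). For reference, a minimal standalone sketch of the ordering rule the removed DispositionableNodeRefComparator encoded: two defined "as of" dates compare chronologically, and a defined date always sorts before an undefined one, since an undefined date stands for a potentially unlimited retention period. The class and field names in the sketch are illustrative, not part of the commit.

import java.util.Comparator;
import java.util.Date;

// Standalone illustration of the removed ordering rule (not part of the commit).
class AsOfDateOrderingSketch
{
    // "Shortest retention first": earlier dates sort first, defined dates sort before null.
    static final Comparator<Date> SHORTEST_RETENTION_FIRST = new Comparator<Date>()
    {
        public int compare(Date d1, Date d2)
        {
            if (d1 != null && d2 != null)
            {
                return d1.compareTo(d2);        // both defined: chronological order
            }
            if (d1 != null || d2 != null)
            {
                return d1 == null ? +1 : -1;    // defined date is "less" than undefined
            }
            return 0;                            // both undefined
        }
    };
}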

View File

@@ -233,6 +233,17 @@ public interface DispositionService
* @param nodeRef node reference
*/
void refreshDispositionAction(NodeRef nodeRef);
/**
* Gets the date of the disposition action with the given name within the given
* disposition schedule.
*
* @param record the record node reference
* @param dispositionSchedule the disposition schedule node reference
* @param dispositionActionName the name of the disposition action
* @return the "as of" date for that action, or null if it cannot be determined
*/
Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName);
/**
* Compute the "disposition as of" date (if necessary) for a disposition action and a node.
@@ -244,4 +255,13 @@ public interface DispositionService
*/
Date calculateAsOfDate(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition,
boolean allowContextFromAsOf);
/**
* Gets the original disposition schedule for the record, rather than the calculated one,
* when multiple disposition schedules apply to the record
*
* @param nodeRef the record node reference
* @return the original disposition schedule, or null if there is none
*/
DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef);
}
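A hypothetical caller-side sketch combining the two new interface methods: resolve the record's original schedule first, then ask for the date of a named action within it. The helper class and the idea of chaining the calls this way are assumptions for illustration, not code from the commit.

import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.service.cmr.repository.NodeRef;

// Illustrative helper only: not part of the commit.
class OriginScheduleDateSketch
{
    static Date originActionAsOfDate(DispositionService dispositionService, NodeRef record, String actionName)
    {
        // the original (not calculated) schedule for a multiply-filed record
        DispositionSchedule origin = dispositionService.getOriginDispositionSchedule(record);
        if (origin == null)
        {
            return null;
        }
        // the "as of" date of the named action within that schedule
        return dispositionService.getDispositionActionDate(record, origin.getNodeRef(), actionName);
    }
}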

View File

@@ -27,6 +27,7 @@ import java.util.List;
import java.util.Map;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry;
import org.alfresco.module.org_alfresco_module_rm.disposition.property.DispositionProperty;
@@ -38,6 +39,7 @@ import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.record.RecordService;
import org.alfresco.module.org_alfresco_module_rm.recordfolder.RecordFolderService;
import org.alfresco.module.org_alfresco_module_rm.util.ServiceBaseImpl;
import org.alfresco.repo.dictionary.types.period.Immediately;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.policy.annotation.Behaviour;
import org.alfresco.repo.policy.annotation.BehaviourBean;
@@ -70,15 +72,26 @@ public class DispositionServiceImpl extends ServiceBaseImpl
/** Logger */
private static final Logger LOGGER = LoggerFactory.getLogger(DispositionServiceImpl.class);
/** Transaction mode for setting next action */
public enum WriteMode
{
/** Do not update any data. */
READ_ONLY,
/** Only set the "disposition as of" date. */
DATE_ONLY,
/**
* Set the "disposition as of" date and the name of the next action. This only happens during the creation of a
* disposition schedule impl node under a record or folder.
*/
DATE_AND_NAME
};
/** Behaviour filter */
private BehaviourFilter behaviourFilter;
/** Records management service registry */
private RecordsManagementServiceRegistry serviceRegistry;
/** Disposition selection strategy */
private DispositionSelectionStrategy dispositionSelectionStrategy;
/** File plan service */
private FilePlanService filePlanService;
@@ -168,16 +181,6 @@ public class DispositionServiceImpl extends ServiceBaseImpl
this.freezeService = freezeService;
}
/**
* Set the dispositionSelectionStrategy bean.
*
* @param dispositionSelectionStrategy
*/
public void setDispositionSelectionStrategy(DispositionSelectionStrategy dispositionSelectionStrategy)
{
this.dispositionSelectionStrategy = dispositionSelectionStrategy;
}
/**
* Behavior to initialize the disposition schedule of a newly filed record.
*
@@ -267,31 +270,56 @@ public class DispositionServiceImpl extends ServiceBaseImpl
* @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override
public DispositionSchedule getDispositionSchedule(NodeRef nodeRef)
public DispositionSchedule getDispositionSchedule(final NodeRef nodeRef)
{
DispositionSchedule di = null;
NodeRef diNodeRef = null;
DispositionSchedule ds = null;
NodeRef dsNodeRef = null;
if (isRecord(nodeRef))
{
// Get the record folders for the record
List<NodeRef> recordFolders = recordFolderService.getRecordFolders(nodeRef);
// At this point, we may have disposition instruction objects from 1..n folders.
diNodeRef = dispositionSelectionStrategy.selectDispositionScheduleFrom(recordFolders);
final NextActionFromDisposition dsNextAction = getDispositionActionByNameForRecord(nodeRef);
if (dsNextAction != null)
{
if (!dsNextAction.getWriteMode().equals(WriteMode.READ_ONLY))
{
final NodeRef action = dsNextAction.getNextActionNodeRef();
final String dispositionActionName = dsNextAction.getNextActionName();
final Date dispositionActionDate = dsNextAction.getNextActionDateAsOf();
AuthenticationUtil.runAsSystem(new RunAsWork<Void>()
{
@Override
public Void doWork()
{
nodeService.setProperty(action, PROP_DISPOSITION_AS_OF, dispositionActionDate);
if (dsNextAction.getWriteMode().equals(WriteMode.DATE_AND_NAME))
{
nodeService.setProperty(action, PROP_DISPOSITION_ACTION_NAME, dispositionActionName);
}
return null;
}
});
}
dsNodeRef = dsNextAction.getDispositionNodeRef();
}
}
else
{
// Get the disposition instructions for the node reference provided
diNodeRef = getDispositionScheduleImpl(nodeRef);
dsNodeRef = getDispositionScheduleImpl(nodeRef);
}
if (diNodeRef != null)
if (dsNodeRef != null)
{
di = new DispositionScheduleImpl(serviceRegistry, nodeService, diNodeRef);
ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dsNodeRef);
}
return di;
return ds;
}
/**
* This method returns a NodeRef
* Gets the disposition instructions
@@ -313,6 +341,28 @@ public class DispositionServiceImpl extends ServiceBaseImpl
}
return result;
}
public DispositionSchedule getOriginDispositionSchedule(NodeRef nodeRef)
{
NodeRef parent = this.nodeService.getPrimaryParent(nodeRef).getParentRef();
if (parent != null)
{
if (filePlanService.isRecordCategory(parent))
{
NodeRef result = getAssociatedDispositionScheduleImpl(parent);
if (result == null)
{
return null;
}
return new DispositionScheduleImpl(serviceRegistry, nodeService, result);
}
else
{
return getOriginDispositionSchedule(parent);
}
}
return null;
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService#getAssociatedDispositionSchedule(org.alfresco.service.cmr.repository.NodeRef)
@@ -620,8 +670,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl
* @param dispositionActionDefinition disposition action definition
* @param allowContextFromAsOf true if the context date is allowed to be obtained from the disposition "as of" property.
*/
private void initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf)
private DispositionAction initialiseDispositionAction(NodeRef nodeRef, DispositionActionDefinition dispositionActionDefinition, boolean allowContextFromAsOf)
{
List<ChildAssociationRef> childAssocs = nodeService.getChildAssocs(nodeRef, ASSOC_NEXT_DISPOSITION_ACTION, ASSOC_NEXT_DISPOSITION_ACTION, 1, true);
if (childAssocs != null && childAssocs.size() > 0)
{
return new DispositionActionImpl(serviceRegistry, childAssocs.get(0).getChildRef());
}
// Create the properties
Map<QName, Serializable> props = new HashMap<QName, Serializable>(10);
@@ -651,6 +707,7 @@ public class DispositionServiceImpl extends ServiceBaseImpl
// For every event create an entry on the action
da.addEventCompletionDetails(event);
}
return da;
}
/**
@@ -683,9 +740,16 @@ public class DispositionServiceImpl extends ServiceBaseImpl
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
if (period.getPeriodType().equals(Immediately.PERIOD_TYPE))
{
contextDate = (Date)nodeService.getProperty(nodeRef, ContentModel.PROP_CREATED);
}
else
{
// for now use 'NOW' as the default context date
// TODO set the default period property ... cut off date or last disposition date depending on context
contextDate = new Date();
}
}
// Calculate the as of date
@@ -897,6 +961,14 @@ public class DispositionServiceImpl extends ServiceBaseImpl
String currentADId = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION_ID);
currentDispositionActionDefinition = di.getDispositionActionDefinition(currentADId);
// When the record has multiple disposition schedules the current disposition action may not be found by id
// In this case it will be searched by name
if(currentDispositionActionDefinition == null)
{
String currentADName = (String) nodeService.getProperty(currentDispositionAction, PROP_DISPOSITION_ACTION);
currentDispositionActionDefinition = di.getDispositionActionDefinitionByName(currentADName);
}
// Get the next disposition action
int index = currentDispositionActionDefinition.getIndex();
index++;
@@ -983,6 +1055,24 @@ public class DispositionServiceImpl extends ServiceBaseImpl
}
}
public Date getDispositionActionDate(NodeRef record, NodeRef dispositionSchedule, String dispositionActionName)
{
DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, dispositionSchedule);
List<ChildAssociationRef> assocs = nodeService.getChildAssocs(dispositionSchedule);
if (assocs != null && assocs.size() > 0)
{
for (ChildAssociationRef assoc : assocs)
{
if (assoc != null && assoc.getQName().getLocalName().contains(dispositionActionName))
{
DispositionActionDefinition actionDefinition = ds.getDispositionActionDefinition(assoc.getChildRef().getId());
return calculateAsOfDate(record, actionDefinition, true);
}
}
}
return null;
}
/**
* Helper method to determine if a node is frozen or has frozen children
*
@@ -1030,4 +1120,162 @@ public class DispositionServiceImpl extends ServiceBaseImpl
}
});
}
/**
* Calculate next disposition action for a record
*
* @param record the record node reference
* @return the next disposition action (name, date) and the associated disposition schedule
*/
protected NextActionFromDisposition getDispositionActionByNameForRecord(NodeRef record)
{
List<NodeRef> recordFolders = recordFolderService.getRecordFolders(record);
DispositionAction nextDispositionAction = getNextDispositionAction(record);
if (nextDispositionAction == null)
{
return getFirstDispositionAction(record, recordFolders);
}
else
{
return getNextDispositionAction(record, recordFolders, nextDispositionAction);
}
}
/**
* Calculate next disposition action when the record already has one
* @param recordFolders
* @param nextDispositionAction
* @return next disposition action and the associated disposition schedule
*/
private NextActionFromDisposition getNextDispositionAction(NodeRef record, List<NodeRef> recordFolders, DispositionAction nextDispositionAction)
{
String recordNextDispositionActionName = nextDispositionAction.getName();
Date recordNextDispositionActionDate = nextDispositionAction.getAsOfDate();
// We're looking for the latest date, so initially start with a very early one.
Date nextDispositionActionDate = new Date(Long.MIN_VALUE);
NodeRef dispositionNodeRef = null;
// Find the latest "disposition as of" date from all the schedules this record is subject to.
for (NodeRef folder : recordFolders)
{
NodeRef dsNodeRef = getDispositionScheduleImpl(folder);
if (dsNodeRef != null)
{
Date dispActionDate = getDispositionActionDate(record, dsNodeRef, recordNextDispositionActionName);
if (dispActionDate == null || (nextDispositionActionDate != null
&& nextDispositionActionDate.before(dispActionDate)))
{
nextDispositionActionDate = dispActionDate;
dispositionNodeRef = dsNodeRef;
if (dispActionDate == null)
{
// Treat null as the latest date possible (so stop searching further).
break;
}
}
}
}
if (dispositionNodeRef == null)
{
return null;
}
WriteMode mode = determineWriteMode(recordNextDispositionActionDate, nextDispositionActionDate);
return new NextActionFromDisposition(dispositionNodeRef, nextDispositionAction.getNodeRef(),
recordNextDispositionActionName, nextDispositionActionDate, mode);
}
/**
* Determine what should be updated for an existing disposition schedule impl. We only update the date if the
* existing date is earlier than the calculated one.
*
* @param recordNextDispositionActionDate The next action date found on the record node (or folder node).
* @param nextDispositionActionDate The next action date calculated from the current disposition schedule(s)
* affecting the node.
* @return READ_ONLY if nothing should be updated, or DATE_ONLY if the date needs updating.
*/
private WriteMode determineWriteMode(Date recordNextDispositionActionDate, Date nextDispositionActionDate)
{
// Treat null dates as being the latest possible date.
Date maxDate = new Date(Long.MAX_VALUE);
Date recordDate = (recordNextDispositionActionDate != null ? recordNextDispositionActionDate : maxDate);
Date calculatedDate = (nextDispositionActionDate != null ? nextDispositionActionDate : maxDate);
// We only need to update the date if the current one is too early.
if (recordDate.before(calculatedDate))
{
return WriteMode.DATE_ONLY;
}
else
{
return WriteMode.READ_ONLY;
}
}
/**
* Calculate first disposition action when the record doesn't have one
* @param recordFolders
* @return next disposition action and the associated disposition schedule
*/
private NextActionFromDisposition getFirstDispositionAction(NodeRef record, List<NodeRef> recordFolders)
{
NodeRef newAction = null;
String newDispositionActionName = null;
// We're looking for the latest date, so start with a very early one.
Date newDispositionActionDateAsOf = new Date(Long.MIN_VALUE);
NodeRef dispositionNodeRef = null;
for (NodeRef folder : recordFolders)
{
NodeRef folderDS = getDispositionScheduleImpl(folder);
if (folderDS != null)
{
DispositionSchedule ds = new DispositionScheduleImpl(serviceRegistry, nodeService, folderDS);
List<DispositionActionDefinition> dispositionActionDefinitions = ds.getDispositionActionDefinitions();
if (dispositionActionDefinitions != null && dispositionActionDefinitions.size() > 0)
{
DispositionActionDefinition firstDispositionActionDef = dispositionActionDefinitions.get(0);
dispositionNodeRef = folderDS;
if (newAction == null)
{
NodeRef recordOrFolder = record;
if (!ds.isRecordLevelDisposition())
{
recordOrFolder = folder;
}
DispositionAction firstDispositionAction = initialiseDispositionAction(recordOrFolder, firstDispositionActionDef, true);
newAction = firstDispositionAction.getNodeRef();
newDispositionActionName = (String)nodeService.getProperty(newAction, PROP_DISPOSITION_ACTION_NAME);
newDispositionActionDateAsOf = firstDispositionAction.getAsOfDate();
}
else if (firstDispositionActionDef.getPeriod() != null)
{
Date firstActionDate = calculateAsOfDate(record, firstDispositionActionDef, true);
if (firstActionDate == null || (newDispositionActionDateAsOf != null
&& newDispositionActionDateAsOf.before(firstActionDate)))
{
newDispositionActionName = firstDispositionActionDef.getName();
newDispositionActionDateAsOf = firstActionDate;
if (firstActionDate == null)
{
// Treat null as the latest date possible, so there's no point searching further.
break;
}
}
}
}
}
}
if (newDispositionActionName == null || dispositionNodeRef == null || newAction == null)
{
return null;
}
return new NextActionFromDisposition(dispositionNodeRef, newAction,
newDispositionActionName, newDispositionActionDateAsOf, WriteMode.DATE_AND_NAME);
}
}
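For reference, a standalone restatement of the date convention the new code above relies on in determineWriteMode and in the schedule-search loops: a null "as of" date is treated as the latest possible date, so the stored date is only rewritten when it is earlier than the calculated one. This is a sketch of the same logic with illustrative names, not additional behaviour.

import java.util.Date;

// Standalone sketch of the "null means latest possible date" convention (not part of the commit).
class LatestDateConventionSketch
{
    private static final Date MAX_DATE = new Date(Long.MAX_VALUE);

    // Mirrors determineWriteMode: update only when the stored date is earlier than the calculated one.
    static boolean needsDateUpdate(Date storedAsOf, Date calculatedAsOf)
    {
        Date stored = (storedAsOf != null ? storedAsOf : MAX_DATE);
        Date calculated = (calculatedAsOf != null ? calculatedAsOf : MAX_DATE);
        return stored.before(calculated);
    }
}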

View File

@@ -0,0 +1,80 @@
package org.alfresco.module.org_alfresco_module_rm.disposition;
import java.util.Date;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionServiceImpl.WriteMode;
import org.alfresco.service.cmr.repository.NodeRef;
public class NextActionFromDisposition
{
public NextActionFromDisposition(NodeRef dispositionNodeRef, NodeRef nextActionNodeRef, String nextActionName, Date nextActionDateAsOf,
WriteMode writeMode)
{
super();
this.dispositionNodeRef = dispositionNodeRef;
this.nextActionNodeRef = nextActionNodeRef;
this.nextActionName = nextActionName;
this.nextActionDateAsOf = nextActionDateAsOf;
this.writeMode = writeMode;
}
private NodeRef dispositionNodeRef;
private NodeRef nextActionNodeRef;
private String nextActionName;
private Date nextActionDateAsOf;
private WriteMode writeMode;
public WriteMode getWriteMode()
{
return writeMode;
}
public void setWriteMode(WriteMode writeMode)
{
this.writeMode = writeMode;
}
public NodeRef getNextActionNodeRef()
{
return nextActionNodeRef;
}
public void setNextActionNodeRef(NodeRef nextActionNodeRef)
{
this.nextActionNodeRef = nextActionNodeRef;
}
public NodeRef getDispositionNodeRef()
{
return dispositionNodeRef;
}
public void setDispositionNodeRef(NodeRef dispositionNodeRef)
{
this.dispositionNodeRef = dispositionNodeRef;
}
public String getNextActionName()
{
return nextActionName;
}
public void setNextActionName(String nextActionName)
{
this.nextActionName = nextActionName;
}
public Date getNextActionDateAsOf()
{
return nextActionDateAsOf;
}
public void setNextActionDateAsOf(Date nextActionDateAsOf)
{
this.nextActionDateAsOf = nextActionDateAsOf;
}
}

View File

@@ -34,12 +34,14 @@ import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementServiceRegistry;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.BeforeFileRecord;
import org.alfresco.module.org_alfresco_module_rm.RecordsManagementPolicies.OnFileRecord;
import org.alfresco.module.org_alfresco_module_rm.capability.Capability;
import org.alfresco.module.org_alfresco_module_rm.capability.CapabilityService;
import org.alfresco.module.org_alfresco_module_rm.capability.RMPermissionModel;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionSchedule;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionScheduleImpl;
import org.alfresco.module.org_alfresco_module_rm.disposition.DispositionService;
import org.alfresco.module.org_alfresco_module_rm.dod5015.DOD5015Model;
import org.alfresco.module.org_alfresco_module_rm.fileplan.FilePlanService;
@@ -228,7 +230,7 @@ public class RecordServiceImpl extends BaseBehaviourBean
/** recordable version service */
private RecordableVersionService recordableVersionService;
/** list of available record meta-data aspects and the file plan types they are applicable to */
private Map<QName, Set<QName>> recordMetaDataAspects;
@@ -385,7 +387,7 @@ public class RecordServiceImpl extends BaseBehaviourBean
{
this.recordableVersionService = recordableVersionService;
}
/**
* Init method
*/
@@ -1732,7 +1734,10 @@ public class RecordServiceImpl extends BaseBehaviourBean
private void validateLinkConditions(NodeRef record, NodeRef recordFolder)
{
// ensure that the linking record folders have compatible disposition schedules
DispositionSchedule recordDispositionSchedule = dispositionService.getDispositionSchedule(record);
// get the origin disposition schedule for the record, not the calculated one
DispositionSchedule recordDispositionSchedule = dispositionService.getOriginDispositionSchedule(record);
if (recordDispositionSchedule != null)
{
DispositionSchedule recordFolderDispositionSchedule = dispositionService.getDispositionSchedule(recordFolder);

View File

@@ -18,22 +18,22 @@
*/
package org.alfresco.repo.web.scripts.roles;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import org.alfresco.model.ContentModel;
import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.module.org_alfresco_module_rm.security.ExtendedReaderDynamicAuthority;
@@ -43,26 +43,26 @@ import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.web.scripts.content.ContentStreamer;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.webscripts.AbstractWebScript;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Format;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptException;
import org.springframework.extensions.webscripts.WebScriptRequest;
import org.springframework.extensions.webscripts.WebScriptResponse;
/**
* Webscript used for removing dynamic authorities from the records.
@@ -71,7 +71,7 @@ import org.springframework.extensions.webscripts.WebScriptResponse;
* @since 2.3.0.7
*/
@SuppressWarnings("deprecation")
public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsManagementModel
{
private static final String MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO = "Parameter batchsize should be a number greater than 0.";
private static final String MESSAGE_PROCESSING_BEGIN = "Processing - BEGIN";
@@ -80,7 +80,7 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static final String MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE = "Processing record {0} - BEGIN";
private static final String MESSAGE_BATCHSIZE_IS_INVALID = "Parameter batchsize is invalid.";
private static final String MESSAGE_BATCHSIZE_IS_MANDATORY = "Parameter batchsize is mandatory";
private static final String MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE = "Parameter parentNodeRef = {0} does not exist.";
private static final String SUCCESS_STATUS = "success";
/**
* The logger
@@ -88,8 +88,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private static Log logger = LogFactory.getLog(DynamicAuthoritiesGet.class);
private static final String BATCH_SIZE = "batchsize";
private static final String TOTAL_NUMBER_TO_PROCESS = "maxProcessedRecords";
private static final String PARAM_EXPORT = "export";
private static final String PARAM_PARENT_NODE_REF = "parentNodeRef";
private static final String MODEL_STATUS = "responsestatus";
private static final String MODEL_MESSAGE = "message";
private static final String MESSAGE_ALL_TEMPLATE = "Processed {0} records.";
@@ -105,60 +105,60 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
private PermissionService permissionService;
private ExtendedSecurityService extendedSecurityService;
private TransactionService transactionService;
/** Content Streamer */
protected ContentStreamer contentStreamer;
private FileFolderService fileFolderService;
/** service setters */
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
public void setNodeDAO(NodeDAO nodeDAO)
{
this.nodeDAO = nodeDAO;
}
public void setQnameDAO(QNameDAO qnameDAO)
{
this.qnameDAO = qnameDAO;
}
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
public void setPermissionService(PermissionService permissionService)
{
this.permissionService = permissionService;
}
public void setExtendedSecurityService(ExtendedSecurityService extendedSecurityService)
{
this.extendedSecurityService = extendedSecurityService;
}
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
public void setContentStreamer(ContentStreamer contentStreamer)
{
this.contentStreamer = contentStreamer;
}
public void setFileFolderService(FileFolderService fileFolderService)
{
this.fileFolderService = fileFolderService;
}
protected Map<String, Object> buildModel(WebScriptRequest req, WebScriptResponse res) throws IOException
{
Map<String, Object> model = new HashMap<String, Object>();
final Long batchSize = getBatchSizeParameter(req);
// get the max node id and the extended security aspect
Long maxNodeId = patchDAO.getMaxAdmNodeID();
final Pair<Long, QName> recordAspectPair = qnameDAO.getQName(ASPECT_EXTENDED_SECURITY);
@@ -170,201 +170,201 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
return model;
}
Long totalNumberOfRecordsToProcess = getMaxToProccessParameter(req, batchSize);
boolean attach = getExportParameter(req);
File file = TempFileProvider.createTempFile("processedNodes_", ".csv");
FileWriter writer = new FileWriter(file);
BufferedWriter out = new BufferedWriter(writer);
List<NodeRef> processedNodes = new ArrayList<NodeRef>();
try
{
NodeRef parentNodeRef = getParentNodeRefParameter(req);
if (parentNodeRef != null)
{
processedNodes = processChildrenNodes(parentNodeRef, batchSize.intValue(), recordAspectPair,
totalNumberOfRecordsToProcess.intValue(), out, attach);
}
else
{
processedNodes = processNodes(batchSize, maxNodeId, recordAspectPair, totalNumberOfRecordsToProcess,
out, attach);
}
}
finally
{
out.close();
}
int processedNodesSize = processedNodes.size();
String message = "";
if (totalNumberOfRecordsToProcess == 0
|| (totalNumberOfRecordsToProcess > 0 && processedNodesSize < totalNumberOfRecordsToProcess))
{
message = MessageFormat.format(MESSAGE_ALL_TEMPLATE, processedNodesSize);
}
if (totalNumberOfRecordsToProcess > 0 && totalNumberOfRecordsToProcess == processedNodesSize)
{
message = MessageFormat.format(MESSAGE_PARTIAL_TEMPLATE, totalNumberOfRecordsToProcess);
}
model.put(MODEL_STATUS, SUCCESS_STATUS);
model.put(MODEL_MESSAGE, message);
logger.info(message);
if (attach)
{
try
{
String fileName = file.getName();
contentStreamer.streamContent(req, res, file, null, attach, fileName, model);
model = null;
}
finally
{
if (file != null)
{
file.delete();
}
}
}
return model;
}
/**
* Get export parameter from the request
*
* @param req the web script request
* @return true if the processed node list should be exported (attached) as a CSV file
*/
protected boolean getExportParameter(WebScriptRequest req)
{
boolean attach = false;
String export = req.getParameter(PARAM_EXPORT);
if (export != null && Boolean.parseBoolean(export))
{
attach = true;
}
return attach;
}
/*
* (non-Javadoc)
* @see org.alfresco.repo.web.scripts.content.StreamContent#execute(org.springframework.extensions.webscripts.
* WebScriptRequest, org.springframework.extensions.webscripts.WebScriptResponse)
*/
@Override
public void execute(WebScriptRequest req, WebScriptResponse res) throws IOException
{
// retrieve requested format
String format = req.getFormat();
try
{
String mimetype = getContainer().getFormatRegistry().getMimeType(req.getAgent(), format);
if (mimetype == null)
{
throw new WebScriptException("Web Script format '" + format + "' is not registered");
}
// construct model for script / template
Status status = new Status();
Cache cache = new Cache(getDescription().getRequiredCache());
Map<String, Object> model = buildModel(req, res);
if (model == null) { return; }
model.put("status", status);
model.put("cache", cache);
Map<String, Object> templateModel = createTemplateParameters(req, res, model);
// render output
int statusCode = status.getCode();
if (statusCode != HttpServletResponse.SC_OK && !req.forceSuccessStatus())
{
if (logger.isDebugEnabled())
{
logger.debug("Force success status header in response: " + req.forceSuccessStatus());
logger.debug("Setting status " + statusCode);
}
res.setStatus(statusCode);
}
// apply location
String location = status.getLocation();
if (location != null && location.length() > 0)
{
if (logger.isDebugEnabled()) logger.debug("Setting location to " + location);
res.setHeader(WebScriptResponse.HEADER_LOCATION, location);
}
// apply cache
res.setCache(cache);
String callback = null;
if (getContainer().allowCallbacks())
{
callback = req.getJSONCallback();
}
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{
if (logger.isDebugEnabled()) logger.debug("Rendering JSON callback response: content type="
+ Format.JAVASCRIPT.mimetype() + ", status=" + statusCode + ", callback=" + callback);
// NOTE: special case for wrapping JSON results in a javascript function callback
res.setContentType(Format.JAVASCRIPT.mimetype() + ";charset=UTF-8");
res.getWriter().write((callback + "("));
}
else
{
if (logger.isDebugEnabled())
logger.debug("Rendering response: content type=" + mimetype + ", status=" + statusCode);
res.setContentType(mimetype + ";charset=UTF-8");
}
// render response according to requested format
renderFormatTemplate(format, templateModel, res.getWriter());
if (format.equals(WebScriptResponse.JSON_FORMAT) && callback != null)
{
// NOTE: special case for wrapping JSON results in a javascript function callback
res.getWriter().write(")");
}
}
catch (Throwable e)
{
if (logger.isDebugEnabled())
{
StringWriter stack = new StringWriter();
e.printStackTrace(new PrintWriter(stack));
logger.debug("Caught exception; decorating with appropriate status template : " + stack.toString());
}
throw createStatusException(e, req, res);
}
}
protected void renderFormatTemplate(String format, Map<String, Object> model, Writer writer)
{
format = (format == null) ? "" : format;
String templatePath = getDescription().getId() + "." + format;
if (logger.isDebugEnabled()) logger.debug("Rendering template '" + templatePath + "'");
renderTemplate(templatePath, model, writer);
}
/**
* Obtain the maximum number of records to be processed from the request, or the batchsize value if it is not specified
*
* @param req the web script request
* @return the maximum number of records to be processed
*/
protected Long getMaxToProccessParameter(WebScriptRequest req, final Long batchSize)
{
String totalToBeProcessedRecordsStr = req.getParameter(TOTAL_NUMBER_TO_PROCESS);
//default total number of records to be processed to batch size value
Long totalNumberOfRecordsToProcess = batchSize;
if (StringUtils.isNotBlank(totalToBeProcessedRecordsStr))
@@ -378,77 +378,77 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
//do nothing here, the value will remain 0L in this case
}
}
return totalNumberOfRecordsToProcess;
}
/**
* Obtain batchsize parameter from the request.
*
* @param req
* @return batchsize parameter from the request
*/
protected Long getBatchSizeParameter(WebScriptRequest req)
{
String batchSizeStr = req.getParameter(BATCH_SIZE);
Long size = 0L;
if (StringUtils.isBlank(batchSizeStr))
{
logger.info(MESSAGE_BATCHSIZE_IS_MANDATORY);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_MANDATORY);
}
try
{
size = Long.parseLong(batchSizeStr);
if (size <= 0)
{
logger.info(MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_PARAMETER_BATCHSIZE_GREATER_THAN_ZERO);
}
}
catch (NumberFormatException ex)
{
logger.info(MESSAGE_BATCHSIZE_IS_INVALID);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, MESSAGE_BATCHSIZE_IS_INVALID);
}
return size;
}
/**
* Get parentNodeRef parameter from the request
*
* @param req the web script request
* @return the parent node reference, or null if the parameter was not supplied
*/
protected NodeRef getParentNodeRefParameter(WebScriptRequest req)
{
String parentNodeRefStr = req.getParameter(PARAM_PARENT_NODE_REF);
NodeRef parentNodeRef = null;
if (StringUtils.isNotBlank(parentNodeRefStr))
{
parentNodeRef = new NodeRef(parentNodeRefStr);
if(!nodeService.exists(parentNodeRef))
{
String message = MessageFormat.format(MESSAGE_NODE_REF_DOES_NOT_EXIST_TEMPLATE, parentNodeRef.toString());
logger.info(message);
throw new WebScriptException(Status.STATUS_BAD_REQUEST, message);
}
}
return parentNodeRef;
}
/**
* Process all nodes, or at most the number of nodes specified by the batchsize or totalNumberOfRecordsToProcess
* parameters
*
* @param batchSize
* @param maxNodeId
* @param recordAspectPair
* @param totalNumberOfRecordsToProcess
* @return the list of processed nodes
*/
protected List<NodeRef> processNodes(final Long batchSize, Long maxNodeId, final Pair<Long, QName> recordAspectPair,
Long totalNumberOfRecordsToProcess, final BufferedWriter out, final boolean attach)
{
final Long maxRecordsToProcess = totalNumberOfRecordsToProcess;
final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
logger.info(MESSAGE_PROCESSING_BEGIN);
@@ -466,8 +466,8 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
public Void execute() throws Throwable
{
// get the nodes with the extended security aspect applied
List<Long> nodeIds = patchDAO.getNodesByAspectQNameId(recordAspectPair.getFirst(), currentIndex,
currentIndex + batchSize);
// process each one
for (Long nodeId : nodeIds)
@@ -482,79 +482,79 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record);
if (attach)
{
out.write(recordName);
out.write(",");
out.write(record.toString());
out.write("\n");
}
}
}
return null;
}
}, false, // read only
true); // requires new
}
logger.info(MESSAGE_PROCESSING_END);
return processedNodes;
}
protected List<NodeRef> processChildrenNodes(NodeRef parentNodeRef, final int batchSize,
final Pair<Long, QName> recordAspectPair, final int maxRecordsToProcess, final BufferedWriter out,
final boolean attach)
{
final List<NodeRef> processedNodes = new ArrayList<NodeRef>();
final List<FileInfo> children = fileFolderService.search(parentNodeRef, "*", /*filesSearch*/true, /*folderSearch*/true, /*includeSubfolders*/true);
logger.info(MESSAGE_PROCESSING_BEGIN);
// by batch size
for (int i = 0; i < children.size(); i += batchSize)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
final int currentIndex = i;
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
List<FileInfo> nodes = children.subList(currentIndex, Math.min(currentIndex + batchSize, children.size()));
// process each one
for (FileInfo node : nodes)
{
if (maxRecordsToProcess != 0 && processedNodes.size() >= maxRecordsToProcess)
{
break;
}
NodeRef record = node.getNodeRef();
if (nodeService.hasAspect(record, recordAspectPair.getSecond()))
{
String recordName = (String) nodeService.getProperty(record, ContentModel.PROP_NAME);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_BEGIN_TEMPLATE, recordName));
processNode(record);
logger.info(MessageFormat.format(MESSAGE_PROCESSING_RECORD_END_TEMPLATE, recordName));
processedNodes.add(record);
if (attach)
{
out.write(recordName);
out.write(",");
out.write(record.toString());
out.write("\n");
}
}
}
return null;
}
}, false, // read only
true); // requires new
}
logger.info(MESSAGE_PROCESSING_END);
return processedNodes;
}
/**
* Process each node
*
@@ -575,20 +575,20 @@ public class DynamicAuthoritiesGet extends AbstractWebScript implements RecordsM
permissionService.clearPermission(nodeRef, ExtendedWriterDynamicAuthority.EXTENDED_WRITER);
// if record then ...
if (nodeService.hasAspect(nodeRef, ASPECT_RECORD))
{
Set<String> readersKeySet = null;
if (readers != null)
{
readersKeySet = readers.keySet();
}
Set<String> writersKeySet = null;
if (writers != null)
{
writersKeySet = writers.keySet();
}
// re-set extended security via API
extendedSecurityService.set(nodeRef, readersKeySet, writersKeySet);
}
}
}
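Finally, a simplified standalone sketch of the batching pattern used by processNodes and processChildrenNodes above: items are handled in sub-lists of batchSize, each batch runs in its own new read-write retrying transaction in the webscript, and processing stops once maxRecordsToProcess is reached (0 meaning no limit). The types and the worker callback here are illustrative simplifications, not code from the commit.

import java.util.List;

// Simplified illustration of the batch loop (not part of the commit).
class BatchLoopSketch
{
    interface BatchWorker
    {
        // in the webscript this body runs inside a retrying transaction (read-write, requires new)
        void process(List<String> batch);
    }

    static int processInBatches(List<String> items, int batchSize, int maxToProcess, BatchWorker worker)
    {
        int processed = 0;
        for (int i = 0; i < items.size(); i += batchSize)
        {
            if (maxToProcess != 0 && processed >= maxToProcess)
            {
                break;   // overall cap reached
            }
            int end = Math.min(i + batchSize, items.size());
            List<String> batch = items.subList(i, end);
            worker.process(batch);
            // note: the real webscript also re-checks the cap per node inside each batch
            processed += batch.size();
        }
        return processed;
    }
}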