* Added new methods to the data set service

* Added/changed the tests
* Added new aspect
* Changed the REST API for getting the list of data sets

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/modules/recordsmanagement/HEAD@41625 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Tuna Aksoy
2012-09-14 21:25:49 +00:00
parent c6af92d1a5
commit 26737c520f
12 changed files with 303 additions and 26 deletions

View File

@@ -21,20 +21,56 @@ public interface DataSetService
*/
Map<String, DataSet> getDataSets();
/**
* Gets the details of all available data sets for a file plan depending on
* the parameter "excludeLoaded".
*
* @param filePlan the file plan for which the details should be retrieved
* @param excludeLoaded if true, only data sets that have not yet been
* loaded will be retrieved
* @return Map<String, DataSet> details of the available data sets for the
* specified file plan, depending on the parameter "excludeLoaded".
* The result may also be an empty map
*/
Map<String, DataSet> getDataSets(NodeRef filePlan, boolean excludeLoaded);
/**
* Gets the details of all loaded data sets for a specified file plan
*
* @param filePlan the file plan for which the loaded data sets should be
* retrieved
* @return Map<String, DataSet> details of all loaded data sets, or an empty
* map if no data sets have been loaded for the specified
* file plan
*/
Map<String, DataSet> getLoadedDataSets(NodeRef filePlan);
/**
* Loads the data set with the specified id into the specified file plan
*
* @param filePlan the file plan into which the data set will be loaded
* @param dataSetId the id of the data set which will be imported
*/
void loadDataSet(NodeRef filePlan, String dataSetId);
/**
* Checks if a data set exists with the given data set id
*
* @param dataSetId the id of the data set which will be checked
* @return true if the data set exists, false otherwise
*/
boolean existsDataSet(String dataSetId);
/**
* Checks if a data set with the id "dataSetId" has been loaded into the
* specified file plan
*
* @param filePlan the file plan for which the check should be done
* @param dataSetId the id of the data set whose loaded state should be
* checked for the specified file plan
* @return true if the data set with the specified id has been loaded into
* the specified file plan, false otherwise
*/
boolean isLoadedDataSet(NodeRef filePlan, String dataSetId);
}
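For orientation, a minimal caller-side sketch of the new interface methods (not part of this commit; the class name and the way the service and file plan references are obtained are illustrative assumptions):

import java.util.Map;

import org.alfresco.module.org_alfresco_module_rm.dataset.DataSet;
import org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService;
import org.alfresco.service.cmr.repository.NodeRef;

public class DataSetUsageSketch
{
    private DataSetService dataSetService;   // assumed to be injected by Spring

    public void loadIfMissing(NodeRef filePlan, String dataSetId)
    {
        // Only attempt the import if the data set exists and has not been loaded yet
        if (dataSetService.existsDataSet(dataSetId) &&
            !dataSetService.isLoadedDataSet(filePlan, dataSetId))
        {
            // Note the new parameter order: file plan first, then data set id
            dataSetService.loadDataSet(filePlan, dataSetId);
        }

        // All data sets not yet loaded into this file plan
        Map<String, DataSet> unloaded = dataSetService.getDataSets(filePlan, true);

        // All data sets already loaded into this file plan
        Map<String, DataSet> loaded = dataSetService.getLoadedDataSets(filePlan);
    }
}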

View File

@@ -4,6 +4,8 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@@ -30,6 +32,7 @@ import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.view.ImporterService;
import org.alfresco.service.cmr.view.Location;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ParameterCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -203,15 +206,38 @@ public class DataSetServiceImpl implements DataSetService, RecordsManagementMode
return this.dataSets;
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService#getDataSets(NodeRef,
* boolean)
*/
@Override
public Map<String, DataSet> getDataSets(NodeRef filePlan, boolean excludeLoaded)
{
ParameterCheck.mandatory("filePlan", filePlan);
ParameterCheck.mandatory("excludeLoaded", excludeLoaded);
// Get the list of all available data sets
Map<String, DataSet> dataSets = new HashMap<String, DataSet>(getDataSets());
// Should only the unloaded data sets be retrieved?
if (excludeLoaded)
{
dataSets.keySet().removeAll(getLoadedDataSets(filePlan).keySet());
}
// Return the (filtered) list of data sets
return dataSets;
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService#loadDataSet(org.alfresco.service.cmr.repository.NodeRef,
* java.lang.String)
*/
@Override
public void loadDataSet(NodeRef filePlan, String dataSetId)
{
ParameterCheck.mandatoryString("dataSetId", dataSetId);
ParameterCheck.mandatory("filePlan", filePlan);
ParameterCheck.mandatoryString("dataSetId", dataSetId);
// Get the data set
DataSet dataSet = getDataSets().get(dataSetId);
@@ -231,9 +257,12 @@ public class DataSetServiceImpl implements DataSetService, RecordsManagementMode
Reader viewReader = new InputStreamReader(is);
Location location = new Location(filePlan);
importerService.importView(viewReader, location, null, null);
// Patch data
patchLoadedData();
// Set the data set id into the file plan's custom aspect
setDataSetIdIntoFilePlan(dataSetId, filePlan);
}
catch (Exception ex)
{
@@ -256,6 +285,63 @@ public class DataSetServiceImpl implements DataSetService, RecordsManagementMode
}
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService#existsDataSet(java.lang.String)
*/
@Override
public boolean existsDataSet(String dataSetId)
{
ParameterCheck.mandatoryString("dataSetId", dataSetId);
return getDataSets().containsKey(dataSetId);
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService#getLoadedDataSets(org.alfresco.service.cmr.repository.NodeRef)
*/
@Override
public Map<String, DataSet> getLoadedDataSets(NodeRef filePlan)
{
ParameterCheck.mandatory("filePlan", filePlan);
// Get the list of available data sets
Map<String, DataSet> availableDataSets = new HashMap<String, DataSet>(getDataSets());
// Get the property value of the aspect
Serializable dataSetIds = nodeService.getProperty(filePlan, PROP_LOADED_DATA_SET_IDS);
// Check if any data has been loaded before
if (dataSetIds != null)
{
// Keep only the data sets which have already been loaded. Removing entries
// while iterating over entrySet() would throw a ConcurrentModificationException,
// so filter via the key set instead.
@SuppressWarnings("unchecked")
ArrayList<String> loadedDataSetIds = (ArrayList<String>) dataSetIds;
availableDataSets.keySet().retainAll(loadedDataSetIds);
return availableDataSets;
}
return new HashMap<String, DataSet>();
}
/**
* @see org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService#isLoadedDataSet(org.alfresco.service.cmr.repository.NodeRef,
* java.lang.String)
*/
@Override
public boolean isLoadedDataSet(NodeRef filePlan, String dataSetId)
{
ParameterCheck.mandatory("filePlan", filePlan);
ParameterCheck.mandatory("dataSetId", dataSetId);
return getLoadedDataSets(filePlan).containsKey(dataSetId);
}
/**
* Temp method to patch AMP'ed data
*
@@ -386,14 +472,37 @@ public class DataSetServiceImpl implements DataSetService, RecordsManagementMode
}
/**
* Helper method for setting the id of the imported data set into the file
* plan's aspect
*
* @param dataSetId The id of the imported data set
* @param filePlan The file plan into which the data set has been imported
*/
@SuppressWarnings("unchecked")
private void setDataSetIdIntoFilePlan(String dataSetId, NodeRef filePlan)
{
ParameterCheck.mandatoryString("dataSetId", dataSetId);
ArrayList<String> loadedDataSetIds;
Serializable dataSetIds = nodeService.getProperty(filePlan, PROP_LOADED_DATA_SET_IDS);
// Check if any data set has been imported
if (dataSetIds == null)
{
Map<QName, Serializable> aspectProperties = new HashMap<QName, Serializable>(1);
aspectProperties.put(PROP_LOADED_DATA_SET_IDS, (Serializable) new ArrayList<String>());
nodeService.addAspect(filePlan, ASPECT_LOADED_DATA_SET_ID, aspectProperties);
loadedDataSetIds = (ArrayList<String>) nodeService.getProperty(filePlan,
PROP_LOADED_DATA_SET_IDS);
}
else
{
loadedDataSetIds = (ArrayList<String>) dataSetIds;
}
// Add the new loaded data set id
loadedDataSetIds.add(dataSetId);
Map<QName, Serializable> aspectProperties = new HashMap<QName, Serializable>(1);
aspectProperties.put(PROP_LOADED_DATA_SET_IDS, (Serializable) loadedDataSetIds);
nodeService.addAspect(filePlan, ASPECT_LOADED_DATA_SET_ID, aspectProperties);
}
}
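A sketch of the kind of test the commit message refers to ("Added/changed the tests"); the test class name, the data set id and the wiring of the service and file plan in setUp() are assumptions for illustration, since the actual test file is not shown in this view:

import junit.framework.TestCase;

import org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService;
import org.alfresco.service.cmr.repository.NodeRef;

public class DataSetServiceBookkeepingSketchTest extends TestCase
{
    private DataSetService dataSetService;   // assumed to be wired up in setUp()
    private NodeRef filePlan;                // assumed to be created in setUp()

    public void testLoadDataSetBookkeeping()
    {
        String dataSetId = "exampleDataSet";   // hypothetical data set id

        assertTrue(dataSetService.existsDataSet(dataSetId));
        assertFalse(dataSetService.isLoadedDataSet(filePlan, dataSetId));

        dataSetService.loadDataSet(filePlan, dataSetId);

        // After loading, the id is tracked on the file plan ...
        assertTrue(dataSetService.isLoadedDataSet(filePlan, dataSetId));
        assertTrue(dataSetService.getLoadedDataSets(filePlan).containsKey(dataSetId));

        // ... and the data set is excluded when only unloaded ones are requested
        assertFalse(dataSetService.getDataSets(filePlan, true).containsKey(dataSetId));
    }
}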

View File

@@ -219,4 +219,8 @@ public interface RecordsManagementModel extends RecordsManagementCustomModel
public static final QName PROP_RS_DISPOITION_INSTRUCTIONS = QName.createQName(RM_URI, "recordSearchDispositionInstructions");
public static final QName PROP_RS_DISPOITION_AUTHORITY = QName.createQName(RM_URI, "recordSearchDispositionAuthority");
public static final QName PROP_RS_HOLD_REASON = QName.createQName(RM_URI, "recordSearchHoldReason");
// Loaded Data Set Ids
public static final QName ASPECT_LOADED_DATA_SET_ID = QName.createQName(RM_URI, "loadedDataSetId");
public static final QName PROP_LOADED_DATA_SET_IDS = QName.createQName(RM_URI, "loadedDataSetIds");
}
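A small sketch of how the new aspect and its multi-valued property can be read back from a file plan node; the class, the injected NodeService and the package of RecordsManagementModel in the import are assumptions inferred from the surrounding code, not part of this commit:

import java.io.Serializable;
import java.util.Collections;
import java.util.List;

import org.alfresco.module.org_alfresco_module_rm.model.RecordsManagementModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class LoadedDataSetIdsSketch implements RecordsManagementModel
{
    private NodeService nodeService;   // assumed to be injected

    @SuppressWarnings("unchecked")
    public List<String> getLoadedDataSetIds(NodeRef filePlan)
    {
        // The aspect is only present once at least one data set has been loaded
        if (!nodeService.hasAspect(filePlan, ASPECT_LOADED_DATA_SET_ID))
        {
            return Collections.<String> emptyList();
        }

        // Multi-valued property holding the ids of all loaded data sets
        Serializable ids = nodeService.getProperty(filePlan, PROP_LOADED_DATA_SET_IDS);
        return (List<String>) ids;
    }
}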

View File

@@ -92,7 +92,7 @@ public class DataSetPost extends DeclarativeWebScript implements RecordsManageme
}
// Load data set into the file plan
dataSetService.loadDataSet(filePlan, dataSetId);
Map<String, Object> model = new HashMap<String, Object>(1, 1.0f);
model.put("success", true);

View File

@@ -7,6 +7,10 @@ import java.util.Map;
import org.alfresco.module.org_alfresco_module_rm.dataset.DataSet;
import org.alfresco.module.org_alfresco_module_rm.dataset.DataSetService;
import org.alfresco.module.org_alfresco_module_rm.model.RmSiteType;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.site.SiteService;
import org.apache.commons.lang.StringUtils;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.DeclarativeWebScript;
import org.springframework.extensions.webscripts.Status;
@@ -18,6 +22,9 @@ public class DataSetsGet extends DeclarativeWebScript
/** Data set service */
private DataSetService dataSetService;
/** Site service */
private SiteService siteService;
/**
* Set data set service
*
@@ -28,6 +35,16 @@ public class DataSetsGet extends DeclarativeWebScript
this.dataSetService = dataSetService;
}
/**
* Set site service
*
* @param siteService the site service
*/
public void setSiteService(SiteService siteService)
{
this.siteService = siteService;
}
/**
* @see org.springframework.extensions.webscripts.DeclarativeWebScript#executeImpl(org.springframework.extensions.webscripts.WebScriptRequest,
* org.springframework.extensions.webscripts.Status,
@@ -36,17 +53,36 @@ public class DataSetsGet extends DeclarativeWebScript
@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache)
{
// Get the site name from the URL and resolve the corresponding file plan
String siteName = req.getParameter("site");
NodeRef filePlan = siteService.getContainer(siteName, RmSiteType.COMPONENT_DOCUMENT_LIBRARY);
// Check if only unloaded data sets should be returned - default value is false
String unloadedOnlyParam = req.getParameter("unloadedonly");
boolean unloadedOnly = false;
if (StringUtils.isNotBlank(unloadedOnlyParam))
{
unloadedOnly = Boolean.parseBoolean(unloadedOnlyParam);
}
// Get all data sets or only the unloaded ones, depending on the "unloadedOnly" parameter
Map<String, DataSet> dataSets = dataSetService.getDataSets(filePlan, unloadedOnly);
List<Map<String, String>> dataSetList = new ArrayList<Map<String, String>>(dataSets.size());
for (Map.Entry<String, DataSet> entry : dataSets.entrySet())
{
Map<String, String> dataSet = new HashMap<String, String>(3);
DataSet value = entry.getValue();
// Data set details
String dataSetId = value.getId();
boolean isLoaded = dataSetService.isLoadedDataSet(filePlan, dataSetId);
dataSet.put("label", value.getLabel());
dataSet.put("id", dataSetId);
dataSet.put("isLoaded", Boolean.toString(isLoaded));
// Add data set to the list
dataSetList.add(dataSet);
}
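For completeness, a client-side sketch of calling the updated GET web script. Only the "site" and "unloadedonly" request parameters and the "label"/"id"/"isLoaded" response fields come from this commit; the host, service path and (omitted) authentication are placeholders, since the URL template lives in the web script descriptor, which is not part of this diff:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class DataSetsGetClientSketch
{
    public static void main(String[] args) throws Exception
    {
        // Placeholder URL - only the "site" and "unloadedonly" parameters are taken
        // from this commit; the path and host are assumptions for illustration.
        URL url = new URL("http://localhost:8080/alfresco/service/example/datasets"
                + "?site=rm&unloadedonly=true");

        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");

        // Each data set entry in the response carries "label", "id" and the new "isLoaded" flag
        BufferedReader reader =
                new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String line;
        while ((line = reader.readLine()) != null)
        {
            System.out.println(line);
        }
        reader.close();
    }
}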