Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-08-07 17:49:17 +00:00)
Merged HEAD-BUG-FIX (4.3/Cloud) to HEAD (4.3/Cloud)
57039: Merged V4.2-BUG-FIX (4.2.1) to HEAD-BUG-FIX (Cloud/4.3)
   56506: Merged HEAD-BUG-FIX to V4.2-BUG-FIX (4.2.1)
      55929: <<NOT IN 4.1.6>> Merged V4.1-BUG-FIX (4.1.7) to HEAD-BUG-FIX (4.2)
         55812: MNT-9076: Penultimate version cannot be accessed from Share when uploading using bulkimport
                Cleaned up the source files (the tabs and EOL chars).

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@61673 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -55,7 +55,7 @@ import org.apache.commons.logging.LogFactory;
 * Abstract base class for the node importer, containing helper methods for use by subclasses.
 *
 * @since 4.0
 *
 */
public abstract class AbstractNodeImporter implements NodeImporter
{
@@ -69,39 +69,39 @@ public abstract class AbstractNodeImporter implements NodeImporter
    protected BehaviourFilter behaviourFilter;

    public void setVersionService(VersionService versionService)
    {
        this.versionService = versionService;
    }

    public void setFileFolderService(FileFolderService fileFolderService)
    {
        this.fileFolderService = fileFolderService;
    }

    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public void setMetadataLoader(MetadataLoader metadataLoader)
    {
        this.metadataLoader = metadataLoader;
    }

    public void setImportStatus(BulkImportStatusImpl importStatus)
    {
        this.importStatus = importStatus;
    }

    public void setBehaviourFilter(BehaviourFilter behaviourFilter)
    {
        this.behaviourFilter = behaviourFilter;
    }

    protected abstract NodeRef importImportableItemImpl(ImportableItem importableItem, boolean replaceExisting);
    protected abstract void importContentAndMetadata(NodeRef nodeRef, ImportableItem.ContentAndMetadata contentAndMetadata, MetadataLoader.Metadata metadata);

    /*
     * Because commons-lang ToStringBuilder doesn't seem to like unmodifiable Maps
     */
    protected final String mapToString(Map<?, ?> map)
@@ -121,27 +121,27 @@ public abstract class AbstractNodeImporter implements NodeImporter
                    result.append(String.valueOf(map.get(key)));
                    result.append(",\n");
                }

                // Delete final dangling ", " value
                result.delete(result.length() - 2, result.length());
            }

            result.append(']');
        }
        else
        {
            result.append("(null)");
        }

        return(result.toString());
    }

    /**
     * Returns the name of the given importable item. This is the final name of the item, as it would appear in the repository,
     * after metadata renames are taken into account.
     *
     * @param importableItem The importableItem with which to
     * @param metadata
     * @return the name of the given importable item
     */
    protected final String getImportableItemName(ImportableItem importableItem, MetadataLoader.Metadata metadata)
@@ -153,7 +153,7 @@ public abstract class AbstractNodeImporter implements NodeImporter
        {
            result = (String)metadata.getProperties().get(ContentModel.PROP_NAME);
        }

        // Step 2: attempt to get name from metadata file
        if (result == null &&
            importableItem != null &&
@@ -169,36 +169,36 @@ public abstract class AbstractNodeImporter implements NodeImporter
                                                    (MetadataLoader.METADATA_SUFFIX.length() + metadataLoader.getMetadataFileExtension().length()));
            }
        }

        return(result);
    }

    protected final int importImportableItemFile(NodeRef nodeRef, ImportableItem importableItem, MetadataLoader.Metadata metadata)
    {
        int result = 0;

        if (importableItem.hasVersionEntries())
        {
            // If cm:versionable isn't listed as one of the aspects for this node, add it - cm:versionable is required for nodes that have versions
            if (!metadata.getAspects().contains(ContentModel.ASPECT_VERSIONABLE))
            {
                if (logger.isWarnEnabled()) logger.warn("Metadata for file '" + getFileName(importableItem.getHeadRevision().getContentFile()) + "' was missing the cm:versionable aspect, yet it has " + importableItem.getVersionEntries().size() + " versions. Adding cm:versionable.");
                metadata.addAspect(ContentModel.ASPECT_VERSIONABLE);
            }

            result = importContentVersions(nodeRef, importableItem);
        }
        else
        {
            importContentAndMetadata(nodeRef, importableItem.getHeadRevision(), metadata);
        }

        return(result);
    }

    protected final int importContentVersions(NodeRef nodeRef, ImportableItem importableItem)
    {
        int result = 0;
        Map<String, Serializable> versionProperties = new HashMap<String, Serializable>();
        // Note: PROP_VERSION_LABEL is a "reserved" property, and cannot be modified by custom code.
        // In other words, we can't use the version label on disk as the version label in Alfresco. :-(
@@ -206,16 +206,16 @@ public abstract class AbstractNodeImporter implements NodeImporter
        //versionProperties.put(ContentModel.PROP_VERSION_LABEL.toPrefixString(), String.valueOf(versionEntry.getVersion()));
        versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR); // Load every version as a major version for now - see http://code.google.com/p/alfresco-bulk-filesystem-import/issues/detail?id=84

        for (final ImportableItem.VersionedContentAndMetadata versionEntry : importableItem.getVersionEntries())
        {
            MetadataLoader.Metadata metadata = loadMetadata(versionEntry);
            importContentAndMetadata(nodeRef, versionEntry, metadata);

            if (logger.isDebugEnabled()) logger.debug("Creating v" + String.valueOf(versionEntry.getVersion()) + " of node '" + nodeRef.toString() + "' (note: version label in Alfresco will not be the same - it is not currently possible to explicitly force a particular version label).");

            versionService.createVersion(nodeRef, versionProperties);
            result += metadata.getProperties().size() + 4; // Add 4 for "standard" metadata properties read from filesystem
        }

        if (logger.isDebugEnabled()) logger.debug("Creating head revision of node " + nodeRef.toString());
        ImportableItem.ContentAndMetadata contentAndMetadata = importableItem.getHeadRevision();
@@ -223,163 +223,163 @@ public abstract class AbstractNodeImporter implements NodeImporter
        importContentAndMetadata(nodeRef, importableItem.getHeadRevision(), metadata);
        versionService.createVersion(nodeRef, versionProperties);

        return(result);
    }

    protected final Triple<NodeRef, Boolean, NodeState> createOrFindNode(NodeRef target, ImportableItem importableItem,
                                                                         boolean replaceExisting, MetadataLoader.Metadata metadata)
    {
        Triple<NodeRef, Boolean, NodeState> result = null;
        boolean isDirectory = false;
        NodeState nodeState = replaceExisting ? NodeState.REPLACED : NodeState.SKIPPED;
        String nodeName = getImportableItemName(importableItem, metadata);
        NodeRef nodeRef = null;

        //####TODO: handle this more elegantly
        if (nodeName == null)
        {
            throw new IllegalStateException("Unable to determine node name for " + String.valueOf(importableItem));
        }

        if (logger.isDebugEnabled())
        {
            logger.debug("Searching for node with name '" + nodeName + "' within node '" + target.toString() + "'.");
        }

        nodeRef = fileFolderService.searchSimple(target, nodeName);

        // If we didn't find an existing item, create a new node in the repo.
        if (nodeRef == null)
        {
            // But only if the content file exists - we don't create new nodes based on metadata-only importableItems
            if (importableItem.getHeadRevision().contentFileExists())
            {
                isDirectory = ImportableItem.FileType.DIRECTORY.equals(importableItem.getHeadRevision().getContentFileType());

                try
                {
                    if (logger.isDebugEnabled()) logger.debug("Creating new node of type '" + metadata.getType().toString() + "' with name '" + nodeName + "' within node '" + target.toString() + "'.");
                    nodeRef = fileFolderService.create(target, nodeName, metadata.getType()).getNodeRef();
                    nodeState = NodeState.CREATED;
                }
                catch (final FileExistsException fee)
                {
                    if (logger.isWarnEnabled()) logger.warn("Node with name '" + nodeName + "' within node '" + target.toString() + "' was created concurrently to the bulk import. Skipping importing it.", fee);
                    nodeRef = null;
                    nodeState = NodeState.SKIPPED;
                }
            }
            else
            {
                if (logger.isDebugEnabled()) logger.debug("Skipping creation of new node '" + nodeName + "' within node '" + target.toString() + "' since it doesn't have a content file.");
                nodeRef = null;
                nodeState = NodeState.SKIPPED;
            }
        }
        // We found the node in the repository. Make sure we return the NodeRef, so that recursive loading works (we need the NodeRef of all sub-spaces, even if we didn't create them).
        else
        {
            if (replaceExisting)
            {
                boolean targetNodeIsSpace = fileFolderService.getFileInfo(nodeRef).isFolder();

                if (importableItem.getHeadRevision().contentFileExists())
                {
                    // If the source file exists, ensure that the target node is of the same type (i.e. file or folder) as it.
                    isDirectory = ImportableItem.FileType.DIRECTORY.equals(importableItem.getHeadRevision().getContentFileType());

                    if (isDirectory != targetNodeIsSpace)
                    {
                        if (logger.isWarnEnabled()) logger.warn("Skipping replacement of " + (isDirectory ? "Directory " : "File ") +
                                                                "'" + getFileName(importableItem.getHeadRevision().getContentFile()) + "'. " +
                                                                "The target node in the repository is a " + (targetNodeIsSpace ? "space node" : "content node") + ".");
                        nodeState = NodeState.SKIPPED;
                    }
                }
                else
                {
                    isDirectory = targetNodeIsSpace;
                }

                if (nodeRef != null)
                {
                    if (metadata.getType() != null)
                    {
                        // Finally, specialise the type.
                        if (logger.isDebugEnabled()) logger.debug("Specialising type of node '" + nodeRef.toString() + "' to '" + String.valueOf(metadata.getType()) + "'.");
                        nodeService.setType(nodeRef, metadata.getType());
                    }

                    nodeState = NodeState.REPLACED;
                }
            }
            else
            {
                if (logger.isDebugEnabled()) logger.debug("Found content node '" + nodeRef.toString() + "', but replaceExisting=false, so skipping it.");
                nodeState = NodeState.SKIPPED;
            }
        }

        result = new Triple<NodeRef, Boolean, NodeState>(nodeRef, isDirectory, nodeState);

        return(result);
    }

    protected String getFileName(File file)
    {
        return FileUtils.getFileName(file);
    }

    protected final void importImportableItemMetadata(NodeRef nodeRef, File parentFile, MetadataLoader.Metadata metadata)
    {
        // Attach aspects
        if (metadata.getAspects() != null)
        {
            for (final QName aspect : metadata.getAspects())
            {
                if (logger.isDebugEnabled()) logger.debug("Attaching aspect '" + aspect.toString() + "' to node '" + nodeRef.toString() + "'.");

                nodeService.addAspect(nodeRef, aspect, null); // Note: we set the aspect's properties separately, hence null for the third parameter
            }
        }

        // Set property values for both the type and any aspect(s)
        if (metadata.getProperties() != null)
        {
            if (logger.isDebugEnabled()) logger.debug("Adding properties to node '" + nodeRef.toString() + "':\n" + mapToString(metadata.getProperties()));

            try
            {
                nodeService.addProperties(nodeRef, metadata.getProperties());
            }
            catch (final InvalidNodeRefException inre)
            {
                if (!nodeRef.equals(inre.getNodeRef()))
                {
                    // Caused by an invalid NodeRef in the metadata (e.g. in an association)
                    throw new IllegalStateException("Invalid nodeRef found in metadata for '" + getFileName(parentFile) + "'. " +
                                                    "Probable cause: an association is being populated via metadata, but the " +
                                                    "NodeRef for the target of that association ('" + inre.getNodeRef() + "') is invalid. " +
                                                    "Please double check your metadata file and try again.", inre);
                }
                else
                {
                    // Logic bug in the BFSIT. :-(
                    throw inre;
                }
            }
        }
    }

    protected final void importImportableItemDirectory(NodeRef nodeRef, ImportableItem importableItem, MetadataLoader.Metadata metadata)
    {
        if (importableItem.hasVersionEntries())
        {
            logger.warn("Skipping versions for directory '" + getFileName(importableItem.getHeadRevision().getContentFile()) + "' - Alfresco does not support versioned spaces.");
        }

        // Attach aspects and set all properties
        importImportableItemMetadata(nodeRef, importableItem.getHeadRevision().getContentFile(), metadata);
    }

    protected final MetadataLoader.Metadata loadMetadata(ImportableItem.ContentAndMetadata contentAndMetadata)
@@ -392,19 +392,19 @@ public abstract class AbstractNodeImporter implements NodeImporter
            final String filename = contentAndMetadata.getContentFile().getName().trim().replaceFirst(DirectoryAnalyser.VERSION_SUFFIX_REGEX, ""); // Strip off the version suffix (if any)
            final Date modified = new Date(contentAndMetadata.getContentFile().lastModified());
            final Date created = modified; //TODO: determine proper file creation time (awaiting JDK 1.7 NIO2 library)

            result.setType(ImportableItem.FileType.FILE.equals(contentAndMetadata.getContentFileType()) ? ContentModel.TYPE_CONTENT : ContentModel.TYPE_FOLDER);
            result.addProperty(ContentModel.PROP_NAME, filename);
            result.addProperty(ContentModel.PROP_TITLE, filename);
            result.addProperty(ContentModel.PROP_CREATED, created);
            result.addProperty(ContentModel.PROP_MODIFIED, modified);
        }

        if (metadataLoader != null)
        {
            metadataLoader.loadMetadata(contentAndMetadata, result);
        }

        return(result);
    }

@@ -412,7 +412,7 @@ public abstract class AbstractNodeImporter implements NodeImporter
    {
        if(logger.isDebugEnabled())
        {
            logger.debug("Importing " + String.valueOf(importableItem));
        }

        NodeRef nodeRef = importImportableItemImpl(importableItem, replaceExisting);
@@ -421,26 +421,26 @@ public abstract class AbstractNodeImporter implements NodeImporter
        //importableItem.setParent(null);
        // importableItem.clearParent();

        importableItem.setNodeRef(nodeRef);

        return nodeRef;
    }

    protected void skipImportableDirectory(ImportableItem importableItem)
    {
        if (logger.isInfoEnabled())
        {
            logger.info("Skipping '" + getFileName(importableItem.getHeadRevision().getContentFile()) + "' as it already exists in the repository and 'replace existing' is false.");
        }
        importStatus.incrementImportableItemsSkipped(importableItem, true);
    }

    protected void skipImportableFile(ImportableItem importableItem)
    {
        if (logger.isInfoEnabled())
        {
            logger.info("Skipping '" + getFileName(importableItem.getHeadRevision().getContentFile()) + "' as it already exists in the repository and 'replace existing' is false.");
        }
        importStatus.incrementImportableItemsSkipped(importableItem, false);
    }
}
@@ -66,214 +66,214 @@ import org.springframework.context.ApplicationContext;
 */
public class AbstractBulkImportTests
{
    protected static ApplicationContext ctx = null;

    protected FileFolderService fileFolderService;
    protected NodeService nodeService;
    protected TransactionService transactionService;
    protected ContentService contentService;
    protected UserTransaction txn = null;
    protected RuleService ruleService;
    protected ActionService actionService;
    protected VersionService versionService;
    protected MultiThreadedBulkFilesystemImporter bulkImporter;

    protected NodeRef rootNodeRef;
    protected FileInfo topLevelFolder;
    protected NodeRef top;

    protected static void startContext()
    {
        ctx = ApplicationContextHelper.getApplicationContext();
    }

    protected static void startContext(String[] configLocations)
    {
        ctx = ApplicationContextHelper.getApplicationContext(configLocations);
    }

    protected static void stopContext()
    {
        ApplicationContextHelper.closeApplicationContext();
    }

    @Before
    public void setup() throws SystemException, NotSupportedException
    {
        try
        {
            nodeService = (NodeService)ctx.getBean("nodeService");
            fileFolderService = (FileFolderService)ctx.getBean("fileFolderService");
            transactionService = (TransactionService)ctx.getBean("transactionService");
            bulkImporter = (MultiThreadedBulkFilesystemImporter)ctx.getBean("bulkFilesystemImporter");
            contentService = (ContentService)ctx.getBean("contentService");
            actionService = (ActionService)ctx.getBean("actionService");
            ruleService = (RuleService)ctx.getBean("ruleService");
            versionService = (VersionService)ctx.getBean("versionService");

            AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());

            String s = "BulkFilesystemImport" + System.currentTimeMillis();

            txn = transactionService.getUserTransaction();
            txn.begin();

            AuthenticationUtil.pushAuthentication();
            AuthenticationUtil.setFullyAuthenticatedUser(AuthenticationUtil.getAdminUserName());

            StoreRef storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, s);
            rootNodeRef = nodeService.getRootNode(storeRef);
            top = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}top"), ContentModel.TYPE_FOLDER).getChildRef();

            topLevelFolder = fileFolderService.create(top, s, ContentModel.TYPE_FOLDER);

            txn.commit();
        }
        catch(Throwable e)
        {
            fail(e.getMessage());
        }
    }

    @After
    public void teardown() throws Exception
    {
        AuthenticationUtil.popAuthentication();
        if(txn != null)
        {
            txn.commit();
        }
    }

    @AfterClass
    public static void afterTests()
    {
        stopContext();
    }

    protected List<FileInfo> getFolders(NodeRef parent, String pattern)
    {
        PagingResults<FileInfo> page = fileFolderService.list(parent, false, true, pattern, null, null, new PagingRequest(CannedQueryPageDetails.DEFAULT_PAGE_SIZE));
        List<FileInfo> folders = page.getPage();
        return folders;
    }

    protected List<FileInfo> getFiles(NodeRef parent, String pattern)
    {
        PagingResults<FileInfo> page = fileFolderService.list(parent, true, false, pattern, null, null, new PagingRequest(CannedQueryPageDetails.DEFAULT_PAGE_SIZE));
        List<FileInfo> files = page.getPage();
        return files;
    }

    protected Map<String, FileInfo> toMap(List<FileInfo> list)
    {
        Map<String, FileInfo> map = new HashMap<String, FileInfo>(list.size());
        for(FileInfo fileInfo : list)
        {
            map.put(fileInfo.getName(), fileInfo);
        }
        return map;
    }

    protected void checkFolder(NodeRef folderNode, String childFolderName, String pattern, int numExpectedFolders, int numExpectedFiles, ExpectedFolder[] expectedFolders, ExpectedFile[] expectedFiles)
    {
        List<FileInfo> folders = getFolders(folderNode, childFolderName);
        assertEquals("", 1, folders.size());
        NodeRef folder1 = folders.get(0).getNodeRef();
        checkFiles(folder1, pattern, numExpectedFolders, numExpectedFiles, expectedFiles, expectedFolders);
    }

    protected void checkFiles(NodeRef parent, String pattern, int expectedNumFolders, int expectedNumFiles,
            ExpectedFile[] expectedFiles, ExpectedFolder[] expectedFolders)
    {
        Map<String, FileInfo> folders = toMap(getFolders(parent, pattern));
        Map<String, FileInfo> files = toMap(getFiles(parent, pattern));
        assertEquals("", expectedNumFolders, folders.size());
        assertEquals("", expectedNumFiles, files.size());

        if(expectedFiles != null)
        {
            for(ExpectedFile expectedFile : expectedFiles)
            {
                FileInfo fileInfo = files.get(expectedFile.getName());
                assertNotNull("", fileInfo);
                assertNotNull("", fileInfo.getContentData());
                assertEquals(expectedFile.getMimeType(), fileInfo.getContentData().getMimetype());
                if(fileInfo.getContentData().getMimetype() == MimetypeMap.MIMETYPE_TEXT_PLAIN
                        && expectedFile.getContentContains() != null)
                {
                    ContentReader reader = contentService.getReader(fileInfo.getNodeRef(), ContentModel.PROP_CONTENT);
                    String contentContains = expectedFile.getContentContains();
                    assertTrue("", reader.getContentString().indexOf(contentContains) != -1);
                }
            }
        }

        if(expectedFolders != null)
        {
            for(ExpectedFolder expectedFolder : expectedFolders)
            {
                FileInfo fileInfo = folders.get(expectedFolder.getName());
                assertNotNull("", fileInfo);
            }
        }
    }

    protected void checkContent(FileInfo file, String name, String mimeType)
    {
        assertEquals("", name, file.getName());
        assertEquals("", mimeType, file.getContentData().getMimetype());
    }


    protected static class ExpectedFolder
    {
        private String name;

        public ExpectedFolder(String name)
        {
            super();
            this.name = name;
        }

        public String getName()
        {
            return name;
        }
    }

    protected static class ExpectedFile
    {
        private String name;
        private String mimeType;
        private String contentContains = null;

        public ExpectedFile(String name, String mimeType, String contentContains)
        {
            this(name, mimeType);
            this.contentContains = contentContains;
        }

        public ExpectedFile(String name, String mimeType)
        {
            super();
            this.name = name;
            this.mimeType = mimeType;
        }

        public String getName()
        {
            return name;
        }

        public String getMimeType()
        {
            return mimeType;
        }

        public String getContentContains()
        {
            return contentContains;
        }
    }
}
@@ -63,151 +63,151 @@ import org.springframework.util.ResourceUtils;
 */
public class BulkImportTest extends AbstractBulkImportTests
{
    private StreamingNodeImporterFactory streamingNodeImporterFactory;

    @BeforeClass
    public static void beforeTests()
    {
        startContext();
    }

    @Before
    public void setup() throws SystemException, NotSupportedException
    {
        super.setup();
        streamingNodeImporterFactory = (StreamingNodeImporterFactory)ctx.getBean("streamingNodeImporterFactory");
    }

    /**
     * For replaceExisting = true, the title must be taken from the metadata and not overridden by the actual filename.
     *
     * @throws Throwable
     */
    @Test
    public void testMNT8470() throws Throwable
    {
        txn = transactionService.getUserTransaction();
        txn.begin();

        NodeRef folderNode = topLevelFolder.getNodeRef();

        try
        {
            NodeImporter nodeImporter = streamingNodeImporterFactory.getNodeImporter(ResourceUtils.getFile("classpath:bulkimport1"));
            BulkImportParameters bulkImportParameters = new BulkImportParameters();
            bulkImportParameters.setTarget(folderNode);
            bulkImportParameters.setReplaceExisting(true);
            bulkImportParameters.setDisableRulesService(true);
            bulkImportParameters.setBatchSize(40);
            bulkImporter.bulkImport(bulkImportParameters, nodeImporter);
        }
        catch(Throwable e)
        {
            fail(e.getMessage());
        }

        System.out.println(bulkImporter.getStatus());
        assertEquals(false, bulkImporter.getStatus().inProgress());

        List<FileInfo> folders = getFolders(folderNode, null);
        assertEquals(1, folders.size());
        FileInfo folder1 = folders.get(0);
        assertEquals("folder1", folder1.getName());
        // title should be taken from the metadata file
        assertEquals("", folder1.getProperties().get(ContentModel.PROP_TITLE));
    }

    @Test
    public void testCopyImportStriping() throws Throwable
    {
        txn = transactionService.getUserTransaction();
        txn.begin();

        NodeRef folderNode = topLevelFolder.getNodeRef();

        try
        {
            NodeImporter nodeImporter = streamingNodeImporterFactory.getNodeImporter(ResourceUtils.getFile("classpath:bulkimport"));
            BulkImportParameters bulkImportParameters = new BulkImportParameters();
            bulkImportParameters.setTarget(folderNode);
            bulkImportParameters.setReplaceExisting(true);
            bulkImportParameters.setDisableRulesService(true);
            bulkImportParameters.setBatchSize(40);
            bulkImporter.bulkImport(bulkImportParameters, nodeImporter);
        }
        catch(Throwable e)
        {
            fail(e.getMessage());
        }

        System.out.println(bulkImporter.getStatus());

        checkFiles(folderNode, null, 2, 9,
                new ExpectedFile[]
                {
                    new ExpectedFile("quickImg1.xls", MimetypeMap.MIMETYPE_EXCEL),
                    new ExpectedFile("quickImg1.doc", MimetypeMap.MIMETYPE_WORD),
                    new ExpectedFile("quick.txt", MimetypeMap.MIMETYPE_TEXT_PLAIN, "The quick brown fox jumps over the lazy dog"),
                },
                new ExpectedFolder[]
                {
                    new ExpectedFolder("folder1"),
                    new ExpectedFolder("folder2")
                });

        List<FileInfo> folders = getFolders(folderNode, "folder1");
        assertEquals("", 1, folders.size());
        NodeRef folder1 = folders.get(0).getNodeRef();
        checkFiles(folder1, null, 1, 0, null,
                new ExpectedFolder[]
                {
                    new ExpectedFolder("folder1.1")
                });

        folders = getFolders(folderNode, "folder2");
        assertEquals("", 1, folders.size());
        NodeRef folder2 = folders.get(0).getNodeRef();
        checkFiles(folder2, null, 1, 0,
                new ExpectedFile[]
                {
                },
                new ExpectedFolder[]
                {
                    new ExpectedFolder("folder2.1")
                });

        folders = getFolders(folder1, "folder1.1");
        assertEquals("", 1, folders.size());
        NodeRef folder1_1 = folders.get(0).getNodeRef();
        checkFiles(folder1_1, null, 2, 12,
                new ExpectedFile[]
                {
                    new ExpectedFile("quick.txt", MimetypeMap.MIMETYPE_TEXT_PLAIN, "The quick brown fox jumps over the lazy dog"),
                    new ExpectedFile("quick.sxw", MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER),
                    new ExpectedFile("quick.tar", "application/x-gtar"),
                },
                new ExpectedFolder[]
                {
                    new ExpectedFolder("folder1.1.1"),
                    new ExpectedFolder("folder1.1.2")
                });

        folders = getFolders(folder2, "folder2.1");
        assertEquals("", 1, folders.size());
        NodeRef folder2_1 = folders.get(0).getNodeRef();

        checkFiles(folder2_1, null, 0, 17,
                new ExpectedFile[]
                {
                    new ExpectedFile("quick.png", MimetypeMap.MIMETYPE_IMAGE_PNG),
                    new ExpectedFile("quick.pdf", MimetypeMap.MIMETYPE_PDF),
                    new ExpectedFile("quick.odt", MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT),
                },
                new ExpectedFolder[]
                {
                });
    }

    protected Rule createCopyRule(NodeRef targetNode, boolean isAppliedToChildren)
    {
        Rule rule = new Rule();
@@ -215,7 +215,7 @@ public class BulkImportTest extends AbstractBulkImportTests
        String title = "rule title " + System.currentTimeMillis();
        rule.setTitle(title);
        rule.setDescription(title);
        rule.applyToChildren(isAppliedToChildren);

        Map<String, Serializable> params = new HashMap<String, Serializable>(1);
        params.put(MoveActionExecuter.PARAM_DESTINATION_FOLDER, targetNode);
@@ -228,9 +228,9 @@ public class BulkImportTest extends AbstractBulkImportTests
        return rule;
    }

    @Test
    public void testImportWithRules() throws Throwable
    {
        NodeRef folderNode = topLevelFolder.getNodeRef();
        NodeImporter nodeImporter = null;

@@ -258,60 +258,60 @@ public class BulkImportTest extends AbstractBulkImportTests
        bulkImporter.bulkImport(bulkImportParameters, nodeImporter);

        System.out.println(bulkImporter.getStatus());

        assertEquals("", 74, bulkImporter.getStatus().getNumberOfContentNodesCreated());

        checkFiles(folderNode, null, 2, 9, new ExpectedFile[] {
                new ExpectedFile("quickImg1.xls", MimetypeMap.MIMETYPE_EXCEL),
                new ExpectedFile("quickImg1.doc", MimetypeMap.MIMETYPE_WORD),
                new ExpectedFile("quick.txt", MimetypeMap.MIMETYPE_TEXT_PLAIN, "The quick brown fox jumps over the lazy dog"),
        },
        new ExpectedFolder[] {
                new ExpectedFolder("folder1"),
                new ExpectedFolder("folder2")
        });

        List<FileInfo> folders = getFolders(folderNode, "folder1");
        assertEquals("", 1, folders.size());
        NodeRef folder1 = folders.get(0).getNodeRef();
        checkFiles(folder1, null, 1, 0, null, new ExpectedFolder[] {
                new ExpectedFolder("folder1.1")
        });

        folders = getFolders(folderNode, "folder2");
        assertEquals("", 1, folders.size());
        NodeRef folder2 = folders.get(0).getNodeRef();
        checkFiles(folder2, null, 1, 0, new ExpectedFile[] {
        },
        new ExpectedFolder[] {
                new ExpectedFolder("folder2.1")
        });

        folders = getFolders(folder1, "folder1.1");
        assertEquals("", 1, folders.size());
        NodeRef folder1_1 = folders.get(0).getNodeRef();
        checkFiles(folder1_1, null, 2, 12, new ExpectedFile[] {
                new ExpectedFile("quick.txt", MimetypeMap.MIMETYPE_TEXT_PLAIN, "The quick brown fox jumps over the lazy dog"),
                new ExpectedFile("quick.sxw", MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER),
                new ExpectedFile("quick.tar", "application/x-gtar"),
        },
        new ExpectedFolder[] {
                new ExpectedFolder("folder1.1.1"),
                new ExpectedFolder("folder1.1.2")
        });

        folders = getFolders(folder2, "folder2.1");
        assertEquals("", 1, folders.size());
        NodeRef folder2_1 = folders.get(0).getNodeRef();

        checkFiles(folder2_1, null, 0, 17, new ExpectedFile[] {
                new ExpectedFile("quick.png", MimetypeMap.MIMETYPE_IMAGE_PNG),
                new ExpectedFile("quick.pdf", MimetypeMap.MIMETYPE_PDF),
                new ExpectedFile("quick.odt", MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT),
        },
        new ExpectedFolder[] {
        });
    }

    /**
     * MNT-9076: Penultimate version cannot be accessed from Share when uploading using bulkimport
@@ -373,6 +373,6 @@ public class BulkImportTest extends AbstractBulkImportTests
        contentReader = this.contentService.getReader(versions[3].getFrozenStateNodeRef(), ContentModel.PROP_CONTENT);
        assertNotNull(contentReader);
        assertEquals("This is version 1 of fileWithVersions.txt.", contentReader.getContentString());
    }

}