stringTaggingSeparators)
@@ -189,21 +188,14 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
* the taggable property should still contain raw string values.
*
* Mixing of NodeRefs and string values is permitted so each raw value is
- * checked for a valid NodeRef representation and if so, converts to a NodeRef,
+ * checked for a valid NodeRef representation and if so, converts to a NodeRef,
* if not, adds as a tag via the {@link TaggingService}.
- *
+ *
* @param actionedUponNodeRef The NodeRef being actioned upon
* @param propertyDef the PropertyDefinition of the taggable property
* @param rawValue the raw value from the metadata extracter
*/
protected void addTags(NodeRef actionedUponNodeRef, PropertyDefinition propertyDef, Serializable rawValue)
- {
- addTags(actionedUponNodeRef, propertyDef, rawValue, nodeService, stringTaggingSeparators, taggingService);
- }
-
- private static void addTags(NodeRef actionedUponNodeRef, PropertyDefinition propertyDef, Serializable rawValue,
-                                NodeService nodeService, List<String> stringTaggingSeparators,
- TaggingService taggingService)
{
if (rawValue == null)
{
@@ -239,7 +231,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
logger.trace("adding string tag name'" + tagName + "' (from tag nodeRef "+nodeRef+") to " + actionedUponNodeRef);
}
- tags.addAll(splitTag(tagName, stringTaggingSeparators));
+ tags.addAll(splitTag(tagName));
}
catch (InvalidNodeRefException e)
{
@@ -258,7 +250,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
logger.trace("adding string tag name'" + singleValue + "' to " + actionedUponNodeRef);
}
- tags.addAll(splitTag((String)singleValue, stringTaggingSeparators));
+ tags.addAll(splitTag((String)singleValue));
}
}
else if (singleValue instanceof NodeRef)
@@ -271,7 +263,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
logger.trace("adding string tag name'" + tagName + "' (for nodeRef "+nodeRef+") to " + actionedUponNodeRef);
}
- tags.addAll(splitTag(tagName, stringTaggingSeparators));
+ tags.addAll(splitTag(tagName));
}
}
}
@@ -281,8 +273,8 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
{
logger.trace("adding string tag name'" + (String)rawValue + "' to " + actionedUponNodeRef);
}
-
- tags.addAll(splitTag((String)rawValue, stringTaggingSeparators));
+
+ tags.addAll(splitTag((String)rawValue));
}
if (logger.isDebugEnabled())
@@ -305,11 +297,6 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
}
    protected List<String> splitTag(String str)
- {
- return splitTag(str, stringTaggingSeparators);
- }
-
-    private static List<String> splitTag(String str, List<String> stringTaggingSeparators)
{
        List<String> result = new ArrayList<>();
if ((str != null) && (!str.equals("")))
@@ -336,7 +323,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
return result;
}
-
+
/**
* @see org.alfresco.repo.action.executer.ActionExecuter#execute(Action,
* NodeRef)
@@ -360,8 +347,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
return;
}
String mimetype = reader.getMimetype();
- long sourceSizeInBytes = reader.getSize();
- MetadataExtracter extracter = metadataExtracterRegistry.getExtractor(mimetype, sourceSizeInBytes);
+ MetadataExtracter extracter = metadataExtracterRegistry.getExtracter(mimetype);
if (extracter == null)
{
if(logger.isDebugEnabled())
@@ -386,7 +372,6 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
try
{
modifiedProperties = extracter.extract(
- actionedUponNodeRef,
reader,
/*OverwritePolicy.PRAGMATIC,*/
nodeProperties);
@@ -423,22 +408,11 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
{
return;
}
-
- addExtractedMetadataToNode(actionedUponNodeRef, nodeProperties, modifiedProperties,
- nodeService, dictionaryService, taggingService, enableStringTagging, carryAspectProperties,
- stringTaggingSeparators);
- }
-
-    public static void addExtractedMetadataToNode(NodeRef actionedUponNodeRef, Map<QName, Serializable> nodeProperties,
-                                                   Map<QName, Serializable> modifiedProperties,
- NodeService nodeService, DictionaryService dictionaryService,
- TaggingService taggingService, boolean enableStringTagging,
-                                                   boolean carryAspectProperties, List<String> stringTaggingSeparators)
- {
+
// Check that all properties have the appropriate aspect applied
        Set<QName> requiredAspectQNames = new HashSet<QName>(3);
        Set<QName> aspectPropertyQNames = new HashSet<QName>(17);
-
+
/**
* The modified properties contain null values as well. As we are only interested
* in the keys, this will force aspect aspect properties to be removed even if there
@@ -458,10 +432,9 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
if (enableStringTagging && propertyContainerDef.getName().equals(ContentModel.ASPECT_TAGGABLE))
{
Serializable oldValue = nodeProperties.get(propertyQName);
- addTags(actionedUponNodeRef, propertyDef, oldValue,
- nodeService, stringTaggingSeparators, taggingService);
+ addTags(actionedUponNodeRef, propertyDef, oldValue);
// Replace the raw value with the created tag NodeRefs
- nodeProperties.put(ContentModel.PROP_TAGS,
+ nodeProperties.put(ContentModel.PROP_TAGS,
nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_TAGS));
}
else
@@ -474,7 +447,7 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
}
}
}
-
+
if (!carryAspectProperties)
{
// Remove any node properties that are defined on the aspects but were not extracted
@@ -492,14 +465,10 @@ public class ContentMetadataExtracter extends ActionExecuterAbstractBase
}
}
}
-
- // The following code can result in a postCommit to extract the metadata again via JavaBehaviour
- // (such as ImapContentPolicy.onAddAspect). Not very efficient, but I cannot think of a way to
- // avoid it that does not risk memory leaks or disabling behaviour we want.
-
+
// Add all the properties to the node BEFORE we add the aspects
nodeService.setProperties(actionedUponNodeRef, nodeProperties);
-
+
// Add each of the aspects, as required
for (QName requiredAspectQName : requiredAspectQNames)
{
diff --git a/repository/src/main/java/org/alfresco/repo/content/metadata/AbstractMappingMetadataExtracter.java b/repository/src/main/java/org/alfresco/repo/content/metadata/AbstractMappingMetadataExtracter.java
index ece13df5b5..78865f8768 100644
--- a/repository/src/main/java/org/alfresco/repo/content/metadata/AbstractMappingMetadataExtracter.java
+++ b/repository/src/main/java/org/alfresco/repo/content/metadata/AbstractMappingMetadataExtracter.java
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
- * Copyright (C) 2005 - 2020 Alfresco Software Limited
+ * Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -25,33 +25,6 @@
*/
package org.alfresco.repo.content.metadata;
-import org.alfresco.api.AlfrescoPublicApi;
-import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.content.StreamAwareContentReaderProxy;
-import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
-import org.alfresco.service.cmr.dictionary.DictionaryService;
-import org.alfresco.service.cmr.dictionary.PropertyDefinition;
-import org.alfresco.service.cmr.repository.ContentIOException;
-import org.alfresco.service.cmr.repository.ContentReader;
-import org.alfresco.service.cmr.repository.ContentWriter;
-import org.alfresco.service.cmr.repository.MalformedNodeRefException;
-import org.alfresco.service.cmr.repository.MimetypeService;
-import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
-import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
-import org.alfresco.service.namespace.InvalidQNameException;
-import org.alfresco.service.namespace.QName;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-import org.springframework.beans.factory.BeanNameAware;
-import org.springframework.context.ApplicationContext;
-import org.springframework.context.ApplicationContextAware;
-import org.springframework.extensions.surf.util.ISO8601DateFormat;
-
import java.io.InputStream;
import java.io.Serializable;
import java.lang.reflect.Array;
@@ -77,6 +50,32 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
+import org.alfresco.api.AlfrescoPublicApi;
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.model.ContentModel;
+import org.alfresco.repo.content.StreamAwareContentReaderProxy;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
+import org.alfresco.service.cmr.dictionary.PropertyDefinition;
+import org.alfresco.service.cmr.repository.ContentIOException;
+import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.cmr.repository.ContentWriter;
+import org.alfresco.service.cmr.repository.MalformedNodeRefException;
+import org.alfresco.service.cmr.repository.MimetypeService;
+import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
+import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
+import org.alfresco.service.namespace.InvalidQNameException;
+import org.alfresco.service.namespace.QName;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+import org.springframework.beans.factory.BeanNameAware;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.extensions.surf.util.ISO8601DateFormat;
+
/**
* Support class for metadata extracters that support dynamic and config-driven
* mapping between extracted values and model properties. Extraction is broken
@@ -132,7 +131,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
private MetadataExtracterRegistry registry;
private MimetypeService mimetypeService;
- protected DictionaryService dictionaryService;
+ private DictionaryService dictionaryService;
private boolean initialized;
    private Set<String> supportedMimetypes;
@@ -233,11 +232,6 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
this.dictionaryService = dictionaryService;
}
-    public Set<String> getSupportedMimetypes()
- {
- return supportedMimetypes;
- }
-
/**
* Set the mimetypes that are supported by the extracter.
*
@@ -284,7 +278,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
return supportedEmbedMimetypes.contains(sourceMimetype);
}
- protected boolean isEnabled(String mimetype)
+ private boolean isEnabled(String mimetype)
{
return properties == null || mimetypeService == null ||
(getBooleanProperty(beanName+".enabled", true) &&
@@ -720,10 +714,10 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
return readMappingProperties(mappingProperties.entrySet());
}
-
+
/**
* A utility method to convert mapping properties entries to the Map form.
- *
+ *
* @see #setMappingProperties(Properties)
*/
    private Map<String, Set<QName>> readMappingProperties(Set<Map.Entry<Object, Object>> mappingPropertiesEntries)
@@ -771,8 +765,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
throw new AlfrescoRuntimeException(
"No prefix mapping for extracter property mapping: \n" +
- " Extracter: " + this + "\n" +
- " Mapping: " + entry);
+ " Extracter: " + this + "\n" +
+ " Mapping: " + entry);
}
qnameStr = QName.NAMESPACE_BEGIN + uri + QName.NAMESPACE_END + suffix;
}
@@ -786,8 +780,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
throw new AlfrescoRuntimeException(
"Can't create metadata extracter property mapping: \n" +
- " Extracter: " + this + "\n" +
- " Mapping: " + entry);
+ " Extracter: " + this + "\n" +
+ " Mapping: " + entry);
}
}
if (logger.isTraceEnabled())
@@ -1138,7 +1132,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
@Override
    public final Map<QName, Serializable> extract(ContentReader reader, Map<QName, Serializable> destination)
{
- return extract(null, reader, this.overwritePolicy, destination, this.mapping);
+ return extract(reader, this.overwritePolicy, destination, this.mapping);
}
/**
@@ -1150,7 +1144,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
OverwritePolicy overwritePolicy,
            Map<QName, Serializable> destination)
{
- return extract(null, reader, overwritePolicy, destination, this.mapping);
+ return extract(reader, overwritePolicy, destination, this.mapping);
}
/**
@@ -1162,29 +1156,6 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
OverwritePolicy overwritePolicy,
            Map<QName, Serializable> destination,
            Map<String, Set<QName>> mapping)
- {
- return extract(null, reader, overwritePolicy, destination, mapping);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
-    public Map<QName, Serializable> extract(NodeRef nodeRef, ContentReader reader, Map<QName, Serializable> destination)
- {
- return extract(nodeRef, reader, overwritePolicy, destination, mapping);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
-    public Map<QName, Serializable> extract(
- NodeRef nodeRef,
- ContentReader reader,
- OverwritePolicy overwritePolicy,
-            Map<QName, Serializable> destination,
-            Map<String, Set<QName>> mapping)
{
// Done
if (logger.isDebugEnabled())
@@ -1211,13 +1182,12 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
// Check that the content has some meat
if (reader.getSize() > 0 && reader.exists())
{
- rawMetadata = extractRaw(nodeRef, reader, getLimits(reader.getMimetype()));
+ rawMetadata = extractRaw(reader, getLimits(reader.getMimetype()));
}
else
{
                rawMetadata = new HashMap<String, Serializable>(1);
}
-
// Convert to system properties (standalone)
            Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
// Convert the properties according to the dictionary types
@@ -1245,7 +1215,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
// the current mime type is plausible
String typeErrorMessage = null;
String differentType = null;
- if (mimetypeService != null)
+ if(mimetypeService != null)
{
differentType = mimetypeService.getMimetypeIfNotMatches(reader.getReader());
}
@@ -1254,7 +1224,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
logger.info("Unable to verify mimetype of " + reader.getReader() +
" as no MimetypeService available to " + getClass().getName());
}
- if (differentType != null)
+ if(differentType != null)
{
typeErrorMessage = "\n" +
" claimed mime type: " + reader.getMimetype() + "\n" +
@@ -1315,19 +1285,6 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
            Map<QName, Serializable> properties,
ContentReader reader,
ContentWriter writer)
- {
- embed(null, properties, reader, writer);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void embed(
- NodeRef nodeRef,
-            Map<QName, Serializable> properties,
- ContentReader reader,
- ContentWriter writer)
{
// Done
if (logger.isDebugEnabled())
@@ -1350,7 +1307,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
try
{
- embedInternal(nodeRef, mapSystemToRaw(properties), reader, writer);
+ embedInternal(mapSystemToRaw(properties), reader, writer);
if(logger.isDebugEnabled())
{
logger.debug("Embedded Metadata into " + writer);
@@ -1515,7 +1472,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
* @return Returns a modified map of properties that have been converted.
*/
@SuppressWarnings("unchecked")
-    protected Map<QName, Serializable> convertSystemPropertyValues(Map<QName, Serializable> systemProperties)
+    private Map<QName, Serializable> convertSystemPropertyValues(Map<QName, Serializable> systemProperties)
{
        Map<QName, Serializable> convertedProperties = new HashMap<QName, Serializable>(systemProperties.size() + 7);
        for (Map.Entry<QName, Serializable> entry : systemProperties.entrySet())
@@ -1543,10 +1500,6 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
convertedPropertyValue = propertyValue;
}
- else if (propertyValue instanceof Long)
- {
- convertedPropertyValue = new Date((Long)propertyValue);
- }
else if (propertyValue instanceof Collection)
{
convertedPropertyValue = (Serializable) makeDates((Collection) propertyValue);
@@ -1565,9 +1518,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
StringBuilder mesg = new StringBuilder();
mesg.append("Unable to convert Date property: ").append(propertyQName)
- .append(", value: ").append(propertyValue).append(" (")
- .append(propertyValue.getClass().getSimpleName())
- .append("), type: ").append(propertyTypeDef.getName());
+ .append(", value: ").append(propertyValue).append(", type: ").append(propertyTypeDef.getName());
logger.warn(mesg.toString());
}
}
@@ -1737,21 +1688,6 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
}
}
- // Try milliseconds. This was introduced with T-Engine extractors. Previously Dates would have been
- // created and then converted to a Alfresco Date property in a single operation. T-Engines do not know
- // about Alfresco Date property formats.
- try
- {
- long ms = Long.parseLong(dateStr);
- if (Long.toString(ms).equals(dateStr))
- {
- date = new Date(ms);
- }
- }
- catch (NumberFormatException ignore)
- {
- }
-
if (date == null)
{
// Still no luck
@@ -2046,7 +1982,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
return limits;
}
-
+
/**
     * Callable wrapper for the
* {@link AbstractMappingMetadataExtracter#extractRaw(ContentReader)} method
@@ -2090,7 +2026,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
/**
* Exception wrapper to handle exceeded limits imposed by {@link MetadataExtracterLimits}
- * {@link AbstractMappingMetadataExtracter#extractRaw(NodeRef, ContentReader, MetadataExtracterLimits)}
+ * {@link AbstractMappingMetadataExtracter#extractRaw(ContentReader, MetadataExtracterLimits)}
*/
private class LimitExceededException extends Exception
{
@@ -2111,17 +2047,19 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
*
* If no timeout limit is defined or is unlimited (-1),
     * the extractRaw method is called directly.
- *
- * @param nodeRef the node being acted on.
+ *
* @param reader the document to extract the values from. This stream provided by
* the reader must be closed if accessed directly.
* @param limits the limits to impose on the extraction
* @return Returns a map of document property values keyed by property name.
* @throws Throwable All exception conditions can be handled.
*/
-    private Map<String, Serializable> extractRaw(NodeRef nodeRef,
+    private Map<String, Serializable> extractRaw(
ContentReader reader, MetadataExtracterLimits limits) throws Throwable
{
+ FutureTask