Merged V2.2 to HEAD

   8078: Merged V2.1 to V2.2
      8025: Fixes WCM-1039: problems with case-insensitive name handling
   8079: Merged V2.1 to V2.2
      8035: -- DONE SEPARATELY --
      8040: Fix AR-1985: the SQL Server dialect is derived from the Sybase dialect, so an additional no-op script is needed
      8046: Better Javadocs for getChildByName()
      8056: Fixed WCM-790: date conversion for metadata extractors
      8057: Fixed WCM-790: properties that don't convert can be discarded (the default is to fail)
      8059: -- DONE SEPARATELY --
      8061: Fixes WCM-790: fallout from CHK-2168 and CHK-2169
   8081: Fix for WCM-1018
   8082: Merged V2.1 to V2.2
      8016: Merged V2.1-A to V2.1
         8000: Additional indexes for AVM
         8013: Patch to introduce reverse indexes required for AVM


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8474 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley 2008-03-10 12:17:10 +00:00
parent f22c4c4e57
commit 72a90a14f1
22 changed files with 428 additions and 67 deletions

View File

@@ -145,6 +145,9 @@
         <property name="mimetypeService">
             <ref bean="mimetypeService" />
         </property>
+        <property name="dictionaryService">
+            <ref bean="dictionaryService" />
+        </property>
     </bean>
 
     <!-- Content Metadata Extracters -->

View File

@@ -24,7 +24,7 @@
                 <value>classpath:alfresco/repository.properties</value>
                 <value>classpath:alfresco/version.properties</value>
                 <value>classpath:alfresco/domain/transaction.properties</value>
-                <value>classpath:alfresco/jndi.properties</value>
+                <!-- <value>classpath:alfresco/jndi.properties</value> -->
             </list>
         </property>
     </bean>
@@ -666,7 +666,6 @@
     <!-- -->
     <bean id="namespaceDAO" class="org.alfresco.repo.dictionary.NamespaceDAOImpl">
         <property name="tenantService">
             <ref bean="tenantService"/>
         </property>
@@ -676,7 +675,21 @@
         <property name="prefixesCache">
             <ref bean="prefixesCache"/>
         </property>
+    </bean>
+    <bean id="dictionaryModelType" class="org.alfresco.repo.dictionary.DictionaryModelType" init-method="init">
+        <property name="dictionaryDAO">
+            <ref bean="dictionaryDAO" />
+        </property>
+        <property name="namespaceDAO">
+            <ref bean="namespaceDAO" />
+        </property>
+        <property name="nodeService">
+            <ref bean="nodeService"/>
+        </property>
+        <property name="contentService">
+            <ref bean="contentService"/>
+        </property>
     </bean>
 
     <bean id="dictionaryDAO" class="org.alfresco.repo.dictionary.DictionaryDAOImpl">
@@ -771,6 +784,24 @@
         </property>
     </bean>
 
+    <bean id="dictionaryRepositoryBootstrap" class="org.alfresco.repo.dictionary.DictionaryRepositoryBootstrap" init-method="bootstrap">
+        <property name="dictionaryDAO">
+            <ref local="dictionaryDAO"/>
+        </property>
+        <property name="contentService">
+            <ref bean="contentService"/>
+        </property>
+        <property name="searchService">
+            <ref bean="searchService"/>
+        </property>
+        <property name="transactionService">
+            <ref bean="transactionService"/>
+        </property>
+        <property name="authenticationComponent">
+            <ref bean="authenticationComponent"/>
+        </property>
+    </bean>
+
     <!-- -->
     <!-- Copy Service -->
     <!-- -->

View File

@@ -26,3 +26,7 @@ ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) R
 CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
 ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+
+CREATE INDEX idx_avm_hl_revpk ON avm_history_links (descendent, ancestor);
+
+CREATE INDEX idx_avm_vr_revuq ON avm_version_roots (avm_store_id, version_id);

View File

@@ -0,0 +1,21 @@
+--
+-- Title: Add text columns that allow null
+-- Database: SQL Server
+-- Since: V2.1 Schema 64
+-- Author: Derek Hulley
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+-- This is a Sybase issue, so nothing is required here.
+--
+
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.1-NotNullColumns';
+INSERT INTO alf_applied_patch
+  (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+  VALUES
+  (
+    'patch.db-V2.1-NotNullColumns', 'Manually executed script upgrade V2.1: Add nullable columns',
+    0, 63, -1, 64, null, 'UNKOWN', 1, 1, 'Script completed'
+  );

View File

@@ -61,6 +61,10 @@ ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) R
 CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
 ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+
+CREATE INDEX idx_avm_hl_revpk ON avm_history_links (descendent, ancestor);
+
+CREATE INDEX idx_avm_vr_revuq ON avm_version_roots (avm_store_id, version_id);
 
 --
 -- Record script finish
 --

View File

@@ -13,3 +13,5 @@ content.http_reader.err.no_connection=Unable to connect to remote Alfresco serve
 content.http_reader.err.no_authentication=The HTTP reader was unable to authenticate on the remote server: {0} \n
 content.http_reader.err.check_cluster=Please ensure that 'replicateUpdates' and 'replicateUpdatesViaCopy' is enabled for the cache 'org.alfresco.cache.ticketsCache'. Check that the general cluster configuration is correct and working.
 content.http_reader.err.unrecognized=An unrecognized error occured when attempting to download content from remote server:\n Server: {0} \n Content: {1} \n HTTP Response: {2}
+
+metadata.extraction.err.type_conversion=Metadata extraction failed because an extracted value failed to convert to the required type: \n Extractor: {0} \n Target Property QName: {1} \n Required Type: {2} \n Extracted Value: {3}

View File

@@ -68,6 +68,7 @@ import org.alfresco.service.cmr.avm.AVMService;
 import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
 import org.alfresco.service.cmr.avm.LayeringDescriptor;
 import org.alfresco.service.cmr.avm.VersionDescriptor;
+import org.alfresco.service.cmr.avm.deploy.DeploymentEvent;
 import org.alfresco.service.cmr.avm.deploy.DeploymentReport;
 import org.alfresco.service.cmr.avm.deploy.DeploymentService;
 import org.alfresco.service.cmr.avmsync.AVMDifference;
@@ -612,6 +613,15 @@ public class AVMServiceTest extends AVMServiceTestBase
         runQueriesForCreateAndDeploy("target");
         assertEquals(fService.lookup(-1, "main:/a/b/biz").getGuid(), fService.lookup(-1, "target:/a/b/biz").getGuid());
         fService.removeNode("main:/a/b/c/foo");
+        fService.createFile("main:/a/b/c", "Foo").close();
+        ContentWriter writer = fService.getContentWriter("main:/a/b/c/Foo");
+        writer.setEncoding("UTF-8");
+        writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
+        writer.putContent("I am main:/a/b/c/foo");
+        report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, false, false, null);
+        System.out.println(report);
+        assertEquals(DeploymentEvent.Type.UPDATED, report.iterator().next().getType());
+        fService.removeNode("main:/a/b/c/foo");
         report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, true, false, null);
         runQueriesForCreateAndDeploy("target");
         System.out.println(report);

View File

@@ -695,7 +695,7 @@ public class AVMStoreImpl implements AVMStore, Serializable
     private SortedMap<String, AVMNodeDescriptor>
     translateListing(Map<String, AVMNode> listing, Lookup lPath)
     {
-        SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>();
+        SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
         for (String name : listing.keySet())
         {
             // TODO consider doing this at a lower level.
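
The listing changes in this file (and in the AVM directory classes below) all swap a naturally ordered TreeMap for one built on String.CASE_INSENSITIVE_ORDER. A minimal standalone sketch of what that comparator changes; the class and values here are hypothetical, not part of the commit:

import java.util.SortedMap;
import java.util.TreeMap;

public class CaseInsensitiveListingDemo
{
    public static void main(String[] args)
    {
        // Natural ordering treats "Foo" and "foo" as two distinct keys
        SortedMap<String, String> sensitive = new TreeMap<String, String>();
        sensitive.put("Foo", "first");
        sensitive.put("foo", "second");
        System.out.println(sensitive.size());        // 2

        // CASE_INSENSITIVE_ORDER collapses them into one entry, which is
        // the behaviour the case-insensitive AVM listings rely on
        SortedMap<String, String> insensitive = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
        insensitive.put("Foo", "first");
        insensitive.put("foo", "second");            // replaces "first"
        System.out.println(insensitive.size());      // 1
        System.out.println(insensitive.get("FOO"));  // "second": lookups ignore case too
    }
}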

View File

@@ -778,7 +778,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
             return false;
         }
         // layer and underlying must match for flattening to be useful.
-        if (!layer.getIndirection().equals(underlying.getPath()))
+        if (!layer.getIndirection().equalsIgnoreCase(underlying.getPath()))
         {
             return false;
         }

View File

@@ -514,7 +514,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
     public SortedMap<String, AVMNodeDescriptor> getListingDirect(AVMNodeDescriptor dir, boolean includeDeleted)
     {
         List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
-        SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>();
+        SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
         for (ChildEntry child : children)
         {
             AVMNode childNode = child.getChild();
@@ -551,7 +551,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
         {
             throw new AVMBadArgumentException("Illegal null argument.");
         }
-        SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>();
+        SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
         // If we are not opaque, get the underlying base listing.
         if (!fOpacity)
         {

View File

@@ -111,7 +111,7 @@ public class LookupKey implements Serializable
             return false;
         }
         LookupKey o = (LookupKey)obj;
-        return fStoreName.equals(o.fStoreName) &&
+        return fStoreName.equalsIgnoreCase(o.fStoreName) &&
                fVersion == o.fVersion &&
                fPath.equals(o.fPath) &&
                fWrite == o.fWrite &&
@@ -124,7 +124,7 @@ public class LookupKey implements Serializable
     @Override
     public int hashCode()
     {
-        int hash = fStoreName.hashCode();
+        int hash = fStoreName.toLowerCase().hashCode();
         hash += fPath.hashCode();
         hash += fVersion;
         hash += fWrite ? 1 : 0;
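
Because equals() now compares store names with equalsIgnoreCase(), hashCode() must hash the lower-cased name; otherwise two keys that compare equal could land in different hash buckets and cache lookups would miss. A minimal sketch of that contract; StoreKey is a hypothetical stand-in for LookupKey:

import java.util.HashMap;
import java.util.Map;

final class StoreKey
{
    private final String name;

    StoreKey(String name) { this.name = name; }

    @Override
    public boolean equals(Object obj)
    {
        if (!(obj instanceof StoreKey)) return false;
        return name.equalsIgnoreCase(((StoreKey) obj).name);
    }

    @Override
    public int hashCode()
    {
        // Must agree with equals(): names equal ignoring case must hash identically
        return name.toLowerCase().hashCode();
    }

    public static void main(String[] args)
    {
        Map<StoreKey, String> cache = new HashMap<StoreKey, String>();
        cache.put(new StoreKey("main"), "cached lookup");
        // Same bucket, compares equal, so the entry is found
        System.out.println(cache.get(new StoreKey("MAIN")));   // "cached lookup"
    }
}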

View File

@@ -163,7 +163,7 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
     {
         throw new AVMBadArgumentException("Path is null.");
     }
-    SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>();
+    SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
     List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
     for (ChildEntry child : children)
     {

View File

@@ -80,7 +80,7 @@ public class SimplePath implements Serializable
         }
         for (int i = 0; i < fNames.length; i++)
         {
-            if (!fNames[i].equals(o.fNames[i]))
+            if (!fNames[i].equalsIgnoreCase(o.fNames[i]))
             {
                 return false;
             }
@@ -97,7 +97,7 @@ public class SimplePath implements Serializable
         int hash = 0;
         for (String name : fNames)
         {
-            hash += name.hashCode();
+            hash += name.toLowerCase().hashCode();
         }
         return hash;
     }

View File

@@ -27,21 +27,33 @@ package org.alfresco.repo.content.metadata;
 import java.io.InputStream;
 import java.io.Serializable;
 import java.lang.reflect.Array;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.StringTokenizer;
 
 import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
+import org.alfresco.service.cmr.dictionary.PropertyDefinition;
 import org.alfresco.service.cmr.repository.ContentIOException;
 import org.alfresco.service.cmr.repository.ContentReader;
 import org.alfresco.service.cmr.repository.MimetypeService;
+import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
+import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
 import org.alfresco.service.namespace.InvalidQNameException;
 import org.alfresco.service.namespace.QName;
+import org.alfresco.util.ISO8601DateFormat;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -88,15 +100,19 @@ import org.apache.commons.logging.LogFactory;
 abstract public class AbstractMappingMetadataExtracter implements MetadataExtracter
 {
     public static final String NAMESPACE_PROPERTY_PREFIX = "namespace.prefix.";
+    private static final String ERR_TYPE_CONVERSION = "metadata.extraction.err.type_conversion";
 
     protected static Log logger = LogFactory.getLog(AbstractMappingMetadataExtracter.class);
 
     private MetadataExtracterRegistry registry;
     private MimetypeService mimetypeService;
+    private DictionaryService dictionaryService;
     private boolean initialized;
     private Set<String> supportedMimetypes;
     private OverwritePolicy overwritePolicy;
+    private boolean failOnTypeConversion;
+    private Set<DateFormat> supportedDateFormats;
     private Map<String, Set<QName>> mapping;
     private boolean inheritDefaultMapping;
@@ -124,6 +140,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
         this.supportedMimetypes = supportedMimetypes;
         // Set defaults
         overwritePolicy = OverwritePolicy.PRAGMATIC;
+        failOnTypeConversion = true;
+        supportedDateFormats = new HashSet<DateFormat>(0);
         mapping = null; // The default will be fetched
         inheritDefaultMapping = false; // Any overrides are complete
         initialized = false;
@@ -157,6 +175,14 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
         return mimetypeService;
     }
 
+    /**
+     * @param dictionaryService the dictionary service to determine which data conversions are necessary
+     */
+    public void setDictionaryService(DictionaryService dictionaryService)
+    {
+        this.dictionaryService = dictionaryService;
+    }
+
     /**
      * Set the mimetypes that are supported by the extracter.
      *
@@ -212,6 +238,46 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
         this.overwritePolicy = OverwritePolicy.valueOf(overwritePolicyStr);
     }
 
+    /**
+     * Set whether the extracter should fail when an extracted value cannot be converted to the
+     * target type defined in the data dictionary model.  This is <tt>true</tt> by default, i.e.
+     * if the extracted data is not compatible with the target model then the extraction will
+     * fail.  If this is <tt>false</tt> then any extracted data that fails to convert will be
+     * discarded.
+     *
+     * @param failOnTypeConversion <tt>false</tt> to discard properties that cannot be converted
+     *                             to the dictionary-defined type, or <tt>true</tt> (default)
+     *                             to fail the extraction if the type doesn't convert
+     */
+    public void setFailOnTypeConversion(boolean failOnTypeConversion)
+    {
+        this.failOnTypeConversion = failOnTypeConversion;
+    }
+
+    /**
+     * Set the date formats, over and above the {@link ISO8601DateFormat ISO 8601 format}, that
+     * will be supported for string-to-date conversions.  The supported syntax is described in the
+     * <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/text/SimpleDateFormat.html">SimpleDateFormat Javadocs</a>.
+     *
+     * @param supportedDateFormats a list of supported date format patterns
+     */
+    public void setSupportedDateFormats(List<String> supportedDateFormats)
+    {
+        this.supportedDateFormats = new HashSet<DateFormat>(5);
+        for (String dateFormatStr : supportedDateFormats)
+        {
+            try
+            {
+                DateFormat df = new SimpleDateFormat(dateFormatStr);
+                this.supportedDateFormats.add(df);
+            }
+            catch (Throwable e)
+            {
+                // The pattern is no good
+                throw new AlfrescoRuntimeException("Unable to set supported date format: " + dateFormatStr, e);
+            }
+        }
+    }
+
     /**
      * Set if the property mappings augment or override the mapping generically provided by the
      * extracter implementation.  The default is <tt>false</tt>, i.e. any mapping set completely
@@ -347,6 +413,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
      *
      * @see #setMappingProperties(Properties)
      */
+    @SuppressWarnings("unchecked")
     protected Map<String, Set<QName>> readMappingProperties(Properties mappingProperties)
     {
         Map<String, String> namespacesByPrefix = new HashMap<String, String>(5);
@@ -562,6 +629,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
                 Map<String, Serializable> rawMetadata = extractRaw(reader);
                 // Convert to system properties (standalone)
                 Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
+                // Convert the properties according to the dictionary types
+                systemProperties = convertSystemPropertyValues(systemProperties);
                 // Now use the proper overwrite policy
                 changedProperties = overwritePolicy.applyProperties(systemProperties, destination);
             }
@@ -628,6 +697,131 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
         return systemProperties;
     }
 
+    /**
+     * Converts all values according to their dictionary-defined type.  This uses the
+     * {@link #setFailOnTypeConversion(boolean) failOnTypeConversion flag} to determine how
+     * failures are handled, i.e. values that fail to convert may be discarded.
+     *
+     * @param systemProperties the values keyed to system property names
+     * @return Returns a modified map of properties that have been converted.
+     */
+    @SuppressWarnings("unchecked")
+    private Map<QName, Serializable> convertSystemPropertyValues(Map<QName, Serializable> systemProperties)
+    {
+        Map<QName, Serializable> convertedProperties = new HashMap<QName, Serializable>(systemProperties.size() + 7);
+        for (Map.Entry<QName, Serializable> entry : systemProperties.entrySet())
+        {
+            QName propertyQName = entry.getKey();
+            Serializable propertyValue = entry.getValue();
+            // Get the property definition
+            PropertyDefinition propertyDef = (dictionaryService == null) ? null : dictionaryService.getProperty(propertyQName);
+            if (propertyDef == null)
+            {
+                // There is nothing in the DD about this so just transfer it
+                convertedProperties.put(propertyQName, propertyValue);
+                continue;
+            }
+            // It is in the DD, so attempt the conversion
+            DataTypeDefinition propertyTypeDef = propertyDef.getDataType();
+            Serializable convertedPropertyValue = null;
+            try
+            {
+                // Attempt to make any date conversions
+                if (propertyTypeDef.getName().equals(DataTypeDefinition.DATE) || propertyTypeDef.getName().equals(DataTypeDefinition.DATETIME))
+                {
+                    if (propertyValue instanceof Collection)
+                    {
+                        convertedPropertyValue = (Serializable) makeDates((Collection) propertyValue);
+                    }
+                    else if (propertyValue instanceof String)
+                    {
+                        convertedPropertyValue = makeDate((String) propertyValue);
+                    }
+                }
+                else
+                {
+                    if (propertyValue instanceof Collection)
+                    {
+                        convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
+                                propertyTypeDef,
+                                (Collection) propertyValue);
+                    }
+                    else
+                    {
+                        convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
+                                propertyTypeDef,
+                                propertyValue);
+                    }
+                }
+                convertedProperties.put(propertyQName, convertedPropertyValue);
+            }
+            catch (TypeConversionException e)
+            {
+                // Do we just absorb this or is it a problem?
+                if (failOnTypeConversion)
+                {
+                    throw AlfrescoRuntimeException.create(
+                            e,
+                            ERR_TYPE_CONVERSION,
+                            this,
+                            propertyQName,
+                            propertyTypeDef.getName(),
+                            propertyValue);
+                }
+            }
+        }
+        // Done
+        return convertedProperties;
+    }
+
+    /**
+     * Convert a collection of date <tt>String</tt>s to <tt>Date</tt> objects
+     */
+    private Collection<Date> makeDates(Collection<String> dateStrs)
+    {
+        List<Date> dates = new ArrayList<Date>(dateStrs.size());
+        for (String dateStr : dateStrs)
+        {
+            Date date = makeDate(dateStr);
+            dates.add(date);
+        }
+        return dates;
+    }
+
+    /**
+     * Convert a date <tt>String</tt> to a <tt>Date</tt> object
+     */
+    private Date makeDate(String dateStr)
+    {
+        Date date = null;
+        try
+        {
+            date = DefaultTypeConverter.INSTANCE.convert(Date.class, dateStr);
+        }
+        catch (TypeConversionException e)
+        {
+            // Try one of the other formats
+            for (DateFormat df : this.supportedDateFormats)
+            {
+                try
+                {
+                    date = df.parse(dateStr);
+                }
+                catch (ParseException ee)
+                {
+                    // Didn't work
+                }
+            }
+            if (date == null)
+            {
+                // Still no luck
+                throw new TypeConversionException("Unable to convert string to date: " + dateStr);
+            }
+        }
+        return date;
+    }
+
     /**
      * Adds a value to the map if it is non-trivial.  A value is trivial if
      * <ul>
@@ -646,6 +840,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
      * @param destination the map to put values into
      * @return Returns <tt>true</tt> if set, otherwise <tt>false</tt>
      */
+    @SuppressWarnings("unchecked")
     protected boolean putRawValue(String key, Serializable value, Map<String, Serializable> destination)
     {
         if (value == null)
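
The makeDate() method above asks DefaultTypeConverter first (which understands ISO 8601 strings) and only then walks the configured SimpleDateFormat patterns. The sketch below shows just that fallback leg; the class and patterns are hypothetical examples, not Alfresco defaults:

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;

public class DateFallbackDemo
{
    // Hypothetical extra patterns, as would be injected via setSupportedDateFormats()
    private static final List<String> PATTERNS = Arrays.asList("yyyy-MM-dd", "dd MMM yyyy");

    static Date parseDate(String dateStr)
    {
        for (String pattern : PATTERNS)
        {
            try
            {
                // SimpleDateFormat is not thread-safe, so create one per call here
                DateFormat df = new SimpleDateFormat(pattern, Locale.ENGLISH);
                return df.parse(dateStr);
            }
            catch (ParseException e)
            {
                // This pattern didn't match; try the next one
            }
        }
        throw new IllegalArgumentException("Unable to convert string to date: " + dateStr);
    }

    public static void main(String[] args)
    {
        System.out.println(parseDate("2008-03-10"));    // matches the first pattern
        System.out.println(parseDate("10 Mar 2008"));   // falls through to the second
    }
}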

View File

@@ -36,7 +36,9 @@ import org.alfresco.model.ContentModel;
 import org.alfresco.repo.content.MimetypeMap;
 import org.alfresco.repo.content.filestore.FileContentReader;
 import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
 import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
 import org.alfresco.service.namespace.QName;
 import org.alfresco.util.ApplicationContextHelper;
 import org.alfresco.util.TempFileProvider;
@@ -57,6 +59,7 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
     protected static final String QUICK_CREATOR = "Nevin Nollop";
 
     protected MimetypeMap mimetypeMap;
+    protected DictionaryService dictionaryService;
 
     protected abstract MetadataExtracter getExtracter();
@@ -67,6 +70,7 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
     public void setUp() throws Exception
     {
         this.mimetypeMap = (MimetypeMap) ctx.getBean("mimetypeService");
+        this.dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
 
         // perform a little cleaning up
         long now = System.currentTimeMillis();
@@ -123,9 +127,11 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
     {
         assertEquals(
                 "Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
-                QUICK_TITLE, properties.get(ContentModel.PROP_TITLE));
+                QUICK_TITLE,
+                DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_TITLE)));
         assertEquals(
                 "Property " + ContentModel.PROP_DESCRIPTION + " not found for mimetype " + mimetype,
-                QUICK_DESCRIPTION, properties.get(ContentModel.PROP_DESCRIPTION));
+                QUICK_DESCRIPTION,
+                DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
     }
 }

View File

@@ -38,6 +38,7 @@ public class HtmlMetadataExtracterTest extends AbstractMetadataExtracterTest
     {
         super.setUp();
         extracter = new HtmlMetadataExtracter();
+        extracter.setDictionaryService(dictionaryService);
         extracter.register();
     }

View File

@@ -15,6 +15,7 @@ public class OfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
     {
         super.setUp();
         extracter = new OfficeMetadataExtracter();
+        extracter.setDictionaryService(dictionaryService);
        extracter.register();
     }

View File

@@ -15,6 +15,7 @@ public class OpenDocumentMetadataExtracterTest extends AbstractMetadataExtracter
     {
         super.setUp();
         extracter = new OpenDocumentMetadataExtracter();
+        extracter.setDictionaryService(dictionaryService);
         extracter.register();
     }

View File

@@ -23,8 +23,6 @@
 package org.alfresco.repo.content.metadata;
 
 import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
-import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;
 
 /**
  * @author Jesper Steen Møller
@@ -42,6 +40,7 @@ public class OpenOfficeMetadataExtracterTest extends AbstractMetadataExtracterTe
         extracter = new OpenOfficeMetadataExtracter();
         extracter.setMimetypeService(mimetypeMap);
+        extracter.setDictionaryService(dictionaryService);
         extracter.setConnection(connection);
         extracter.init();
     }

View File

@@ -16,6 +16,7 @@ public class PdfBoxMetadataExtracterTest extends AbstractMetadataExtracterTest
     {
         super.setUp();
         extracter = new PdfBoxMetadataExtracter();
+        extracter.setDictionaryService(dictionaryService);
         extracter.register();
     }

View File

@@ -70,6 +70,8 @@ import org.alfresco.service.cmr.security.AuthenticationService;
 import org.alfresco.service.namespace.QName;
 import org.alfresco.util.NameMatcher;
 import org.alfresco.util.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.springframework.remoting.rmi.RmiProxyFactoryBean;
 
 /**
@@ -78,6 +80,8 @@ import org.springframework.remoting.rmi.RmiProxyFactoryBean;
  */
 public class DeploymentServiceImpl implements DeploymentService
 {
+    private static Log fgLogger = LogFactory.getLog(DeploymentServiceImpl.class);
+
     /**
      * Class to hold Deployment destination information.
      * Used as a lock to serialize deployments to the same
@@ -122,6 +126,11 @@ public class DeploymentServiceImpl implements DeploymentService
         {
             return fHost.hashCode() + fPort;
         }
+
+        public String toString()
+        {
+            return fHost;
+        }
     };
 
     /**
@@ -169,6 +178,10 @@ public class DeploymentServiceImpl implements DeploymentService
         DeploymentDestination dest = getLock(hostName, port);
         synchronized (dest)
         {
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug("Deploying to Remote Alfresco at " + dest);
+            }
             try
             {
                 DeploymentReport report = new DeploymentReport();
@@ -178,6 +191,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
                                                             new Pair<Integer, String>(version, srcPath),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -229,6 +246,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                         new Pair<Integer, String>(version, srcPath),
                                         dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -248,6 +269,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 event = new DeploymentEvent(DeploymentEvent.Type.END,
                                             new Pair<Integer, String>(version, srcPath),
                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -269,6 +294,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.END,
                                                             new Pair<Integer, String>(version, srcPath),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -381,6 +410,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                     source,
                                     destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -428,6 +461,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                         source,
                                                         destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -449,6 +486,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                         source,
                                                         destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -482,6 +523,10 @@ public class DeploymentServiceImpl implements DeploymentService
             String destination = dst.getPath();
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                         source, destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -512,6 +557,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
                                                         source,
                                                         destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -536,6 +585,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
                                                         source,
                                                         destination);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             report.add(event);
             if (callbacks != null)
             {
@@ -798,6 +851,10 @@ public class DeploymentServiceImpl implements DeploymentService
                                              boolean dontDelete, boolean dontDo,
                                              List<DeploymentCallback> callbacks)
     {
+        if (fgLogger.isDebugEnabled())
+        {
+            fgLogger.debug("Deploying to FileSystem Receiver on " + hostName + " to target " + target);
+        }
         DeploymentReport report = new DeploymentReport();
         DeploymentReceiverService service = null;
         String ticket = null;
@@ -807,6 +864,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
                                                         new Pair<Integer, String>(version, srcPath),
                                                         target);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             if (callbacks != null)
             {
                 for (DeploymentCallback callback : callbacks)
@@ -893,6 +954,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                                                 new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
                                                                 newDstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -915,7 +980,7 @@ public class DeploymentServiceImpl implements DeploymentService
                     src = null;
                     continue;
                 }
-                int diff = src.getName().compareTo(dst.getName());
+                int diff = src.getName().compareToIgnoreCase(dst.getName());
                 if (diff < 0)
                 {
                     if (!excluded(matcher, src.getPath(), null))
@@ -973,6 +1038,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                                                 new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
                                                                 newDstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -1008,6 +1077,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                             new Pair<Integer, String>(version, src.getPath()),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 if (callbacks != null)
                 {
                     for (DeploymentCallback callback : callbacks)
@@ -1049,6 +1122,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                             new Pair<Integer, String>(version, src.getPath()),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 if (callbacks != null)
                 {
                     for (DeploymentCallback callback : callbacks)
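
Every debug statement added in this file is wrapped in an isDebugEnabled() guard so the message (string concatenation or event formatting) is never built when debug logging is off. A minimal sketch of the commons-logging idiom; the class is hypothetical:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class GuardedLoggingDemo
{
    private static final Log logger = LogFactory.getLog(GuardedLoggingDemo.class);

    public void deploy(String host, int port)
    {
        // Without the guard, the concatenation below would run even with
        // debug disabled; with it, the cost is a single boolean check
        if (logger.isDebugEnabled())
        {
            logger.debug("Deploying to " + host + ":" + port);
        }
    }
}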

View File

@@ -480,6 +480,11 @@ public interface NodeService
     /**
      * Get the node with the given name within the context of the parent node.  The name
      * is case-insensitive as Alfresco has to support case-insensitive clients as standard.
+     * <p>
+     * This method only works for child associations that do not allow duplicate child names;
+     * see <b>cm:folder</b> and the <b>duplicate</b> tag.  Child associations that do allow
+     * duplicates can store multiple children with the same name against the given association
+     * type, so a lookup by name is not well-defined for them.
      *
      * @param nodeRef the parent node - usually a <b>container</b>
      * @param assocTypeQName the type of the association
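
A short usage sketch of the lookup documented above; the helper class is hypothetical, while ContentModel.ASSOC_CONTAINS is the standard cm:contains association used for folder children:

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class GetChildByNameExample
{
    // Returns the child node or null.  The match is case-insensitive, and it is
    // only well-defined because cm:contains forbids duplicate child names.
    public NodeRef findChild(NodeService nodeService, NodeRef folder, String name)
    {
        return nodeService.getChildByName(folder, ContentModel.ASSOC_CONTAINS, name);
    }
}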