Merged V2.2 to HEAD

   8078: Merged V2.1 to V2.2
      8025: Fixes WCM-1039: problems with case-insensitive name handling.
   8079: Merged V2.1 to V2.2
      8035: -- DONE SEPARATELY --
      8040: Fix AR-1985: the SQL Server dialect is derived from the Sybase dialect, so it needs an additional no-op script
      8046: Better Javadocs for getChildByName()
      8056: Fixed WCM-790: Date conversion for metadata extractors
      8057: Fixed WCM-790: Properties that don't convert can be discarded (default is to fail)
      8059: -- DONE SEPARATELY --
      8061: Fixes WCM-790: Fallout from CHK-2168 and CHK-2169
   8081: Fix for WCM-1018
   8082: Merged V2.1 to V2.2
      8016: Merged V2.1-A to V2.1
         8000: Additional indexes for AVM
         8013: Patch to introduce reverse indexes required for AVM


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8474 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2008-03-10 12:17:10 +00:00
parent f22c4c4e57
commit 72a90a14f1
22 changed files with 428 additions and 67 deletions

View File

@@ -68,6 +68,7 @@ import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
import org.alfresco.service.cmr.avm.LayeringDescriptor;
import org.alfresco.service.cmr.avm.VersionDescriptor;
import org.alfresco.service.cmr.avm.deploy.DeploymentEvent;
import org.alfresco.service.cmr.avm.deploy.DeploymentReport;
import org.alfresco.service.cmr.avm.deploy.DeploymentService;
import org.alfresco.service.cmr.avmsync.AVMDifference;
@@ -96,7 +97,7 @@ import org.alfresco.util.Pair;
/**
* Big test of AVM behavior.
*
* @author britt
*/
public class AVMServiceTest extends AVMServiceTestBase
@@ -612,6 +613,15 @@ public class AVMServiceTest extends AVMServiceTestBase
runQueriesForCreateAndDeploy("target");
assertEquals(fService.lookup(-1, "main:/a/b/biz").getGuid(), fService.lookup(-1, "target:/a/b/biz").getGuid());
fService.removeNode("main:/a/b/c/foo");
fService.createFile("main:/a/b/c", "Foo").close();
ContentWriter writer = fService.getContentWriter("main:/a/b/c/Foo");
writer.setEncoding("UTF-8");
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("I am main:/a/b/c/foo");
report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, false, false, null);
System.out.println(report);
assertEquals(DeploymentEvent.Type.UPDATED, report.iterator().next().getType());
fService.removeNode("main:/a/b/c/foo");
report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, true, false, null);
runQueriesForCreateAndDeploy("target");
System.out.println(report);
@@ -5531,7 +5541,7 @@ public class AVMServiceTest extends AVMServiceTestBase
/**
* Test async indexing.
*
* @throws Exception
*/
public void testAsyncIndex() throws Exception

View File

@@ -695,7 +695,7 @@ public class AVMStoreImpl implements AVMStore, Serializable
private SortedMap<String, AVMNodeDescriptor>
translateListing(Map<String, AVMNode> listing, Lookup lPath)
{
SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>();
SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
for (String name : listing.keySet())
{
// TODO consider doing this at a lower level.

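The change above swaps the natural-ordered TreeMap for one built with String.CASE_INSENSITIVE_ORDER, so directory listings collapse names that differ only by case. A minimal standalone sketch of that behaviour (plain JDK, the class name is illustrative, not Alfresco code):

    import java.util.SortedMap;
    import java.util.TreeMap;

    public class CaseInsensitiveListingSketch
    {
        public static void main(String[] args)
        {
            // Keys that differ only by case map to the same entry
            SortedMap<String, String> listing =
                    new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
            listing.put("Foo", "node-1");
            listing.put("foo", "node-2");            // replaces the value stored under "Foo"
            System.out.println(listing.size());      // 1
            System.out.println(listing.get("FOO"));  // node-2
        }
    }

The same constructor change is applied to the other listing maps in LayeredDirectoryNodeImpl and PlainDirectoryNodeImpl below.
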
View File

@@ -778,7 +778,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
return false;
}
// layer and underlying must match for flattening to be useful.
if (!layer.getIndirection().equals(underlying.getPath()))
if (!layer.getIndirection().equalsIgnoreCase(underlying.getPath()))
{
return false;
}

View File

@@ -514,7 +514,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
public SortedMap<String, AVMNodeDescriptor> getListingDirect(AVMNodeDescriptor dir, boolean includeDeleted)
{
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>();
SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
for (ChildEntry child : children)
{
AVMNode childNode = child.getChild();
@@ -551,7 +551,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
{
throw new AVMBadArgumentException("Illegal null argument.");
}
SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>();
SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
// If we are not opaque, get the underlying base listing.
if (!fOpacity)
{

View File

@@ -1,5 +1,5 @@
/**
*
*/
package org.alfresco.repo.avm;
@@ -19,22 +19,22 @@ public class LookupKey implements Serializable
* The name of the store.
*/
private String fStoreName;
/**
* The path being looked up.
*/
private SimplePath fPath;
/**
* The version being looked up.
*/
private int fVersion;
/**
* Whether the lookup is a write lookup.
*/
private boolean fWrite;
/**
* Whether the lookup includes deleted nodes.
*/
@@ -48,7 +48,7 @@ public class LookupKey implements Serializable
* @param write Whether this is a write lookup.
* @param includeDeleted Whether this lookup should include deleted items.
*/
public LookupKey(int version,
SimplePath path,
String storeName,
boolean write,
@@ -60,7 +60,7 @@ public class LookupKey implements Serializable
fWrite = write;
fIncludeDeleted = includeDeleted;
}
public LookupKey(LookupKey other)
{
fVersion = other.fVersion;
@@ -69,7 +69,7 @@ public class LookupKey implements Serializable
fWrite = other.fWrite;
fIncludeDeleted = other.fIncludeDeleted;
}
/**
* Set the writeness of this key.
*/
@@ -86,7 +86,7 @@ public class LookupKey implements Serializable
{
return fStoreName;
}
/**
* Is this a write lookup.
* @return Whether this is a write lookup.
@@ -95,12 +95,12 @@ public class LookupKey implements Serializable
{
return fWrite;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
@@ -111,7 +111,7 @@ public class LookupKey implements Serializable
return false;
}
LookupKey o = (LookupKey)obj;
return fStoreName.equals(o.fStoreName) &&
return fStoreName.equalsIgnoreCase(o.fStoreName) &&
fVersion == o.fVersion &&
fPath.equals(o.fPath) &&
fWrite == o.fWrite &&
@@ -122,9 +122,9 @@ public class LookupKey implements Serializable
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode()
{
int hash = fStoreName.hashCode();
int hash = fStoreName.toLowerCase().hashCode();
hash += fPath.hashCode();
hash += fVersion;
hash += fWrite ? 1 : 0;
@@ -136,8 +136,8 @@ public class LookupKey implements Serializable
* @see java.lang.Object#toString()
*/
@Override
public String toString()
{
return fStoreName + ":" + fPath + "-" + fVersion + "-" + fWrite + "-" + fIncludeDeleted;
}
}

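LookupKey now compares store names with equalsIgnoreCase and lower-cases the name in hashCode. The two must change together: keys that compare equal but hash differently would silently miss in any hash-based cache. A small sketch of that contract using a hypothetical StoreKey class (illustrative only, not Alfresco code):

    import java.util.HashMap;
    import java.util.Map;

    final class StoreKey
    {
        private final String storeName;

        StoreKey(String storeName) { this.storeName = storeName; }

        @Override
        public boolean equals(Object obj)
        {
            return (obj instanceof StoreKey)
                    && storeName.equalsIgnoreCase(((StoreKey) obj).storeName);
        }

        @Override
        public int hashCode()
        {
            // Must be lower-cased to stay consistent with equalsIgnoreCase
            return storeName.toLowerCase().hashCode();
        }

        public static void main(String[] args)
        {
            Map<StoreKey, String> cache = new HashMap<StoreKey, String>();
            cache.put(new StoreKey("Main"), "hit");
            System.out.println(cache.get(new StoreKey("main"))); // hit
        }
    }

(String.equalsIgnoreCase and the default-locale toLowerCase can disagree for a few non-ASCII characters, but they behave consistently for typical store and path names.)
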
View File

@@ -163,7 +163,7 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
{
throw new AVMBadArgumentException("Path is null.");
}
SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>();
SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
for (ChildEntry child : children)
{

View File

@@ -1,5 +1,5 @@
/**
*
*/
package org.alfresco.repo.avm.util;
@@ -9,7 +9,7 @@ import java.io.Serializable;
* Holds a simple path.
* @author britt
*/
public class SimplePath implements Serializable
{
private static final long serialVersionUID = 2696828491008988470L;
@@ -17,7 +17,7 @@ public class SimplePath implements Serializable
* The names of the path's components.
*/
private String [] fNames;
/**
* Construct a new one from a string.
* @param path The String representation of the path.
@@ -39,7 +39,7 @@ public class SimplePath implements Serializable
}
fNames = path.split("/+");
}
/**
* Get the component name at index.
* @param index The index of the component to get.
@@ -49,7 +49,7 @@ public class SimplePath implements Serializable
{
return fNames[index];
}
/**
* Get the number of components in this path.
* @return The number of components.
@@ -63,7 +63,7 @@ public class SimplePath implements Serializable
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
@@ -80,7 +80,7 @@ public class SimplePath implements Serializable
}
for (int i = 0; i < fNames.length; i++)
{
if (!fNames[i].equals(o.fNames[i]))
if (!fNames[i].equalsIgnoreCase(o.fNames[i]))
{
return false;
}
@@ -92,12 +92,12 @@ public class SimplePath implements Serializable
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode()
{
int hash = 0;
for (String name : fNames)
{
hash += name.hashCode();
hash += name.toLowerCase().hashCode();
}
return hash;
}
@@ -106,7 +106,7 @@ public class SimplePath implements Serializable
* @see java.lang.Object#toString()
*/
@Override
public String toString()
{
StringBuilder builder = new StringBuilder();
for (String name : fNames)

View File

@@ -27,21 +27,33 @@ package org.alfresco.repo.content.metadata;
import java.io.InputStream;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.namespace.InvalidQNameException;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ISO8601DateFormat;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -88,15 +100,19 @@ import org.apache.commons.logging.LogFactory;
abstract public class AbstractMappingMetadataExtracter implements MetadataExtracter
{
public static final String NAMESPACE_PROPERTY_PREFIX = "namespace.prefix.";
private static final String ERR_TYPE_CONVERSION = "metadata.extraction.err.type_conversion";
protected static Log logger = LogFactory.getLog(AbstractMappingMetadataExtracter.class);
private MetadataExtracterRegistry registry;
private MimetypeService mimetypeService;
private DictionaryService dictionaryService;
private boolean initialized;
private Set<String> supportedMimetypes;
private OverwritePolicy overwritePolicy;
private boolean failOnTypeConversion;
private Set<DateFormat> supportedDateFormats;
private Map<String, Set<QName>> mapping;
private boolean inheritDefaultMapping;
@@ -124,6 +140,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
this.supportedMimetypes = supportedMimetypes;
// Set defaults
overwritePolicy = OverwritePolicy.PRAGMATIC;
failOnTypeConversion = true;
supportedDateFormats = new HashSet<DateFormat>(0);
mapping = null; // The default will be fetched
inheritDefaultMapping = false; // Any overrides are complete
initialized = false;
@@ -156,7 +174,15 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
{
return mimetypeService;
}
/**
* @param dictionaryService the dictionary service to determine which data conversions are necessary
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
/**
* Set the mimetypes that are supported by the extracter.
*
@@ -212,6 +238,46 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
this.overwritePolicy = OverwritePolicy.valueOf(overwritePolicyStr);
}
/**
* Set whether the extraction should fail when metadata cannot be converted to the target type
* defined in the data dictionary model. This is <tt>true</tt> by default, i.e. if the extracted
* data is not compatible with the target model then the extraction will fail. If this is
* <tt>false</tt> then any extracted data that fails to convert will be discarded.
*
* @param failOnTypeConversion <tt>false</tt> to discard properties that can't get converted
* to the dictionary-defined type, or <tt>true</tt> (default)
* to fail the extraction if the type doesn't convert
*/
public void setFailOnTypeConversion(boolean failOnTypeConversion)
{
this.failOnTypeConversion = failOnTypeConversion;
}
/**
* Set the date formats, over and above the {@link ISO8601DateFormat ISO8601 format}, that will
* be supported for string to date conversions. The supported syntax is described by the
* <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/text/SimpleDateFormat.html">SimpleDateFormat Javadocs</a>.
*
* @param supportedDateFormats a list of supported date formats.
*/
public void setSupportedDateFormats(List<String> supportedDateFormats)
{
this.supportedDateFormats = new HashSet<DateFormat>(5);
for (String dateFormatStr : supportedDateFormats)
{
try
{
DateFormat df = new SimpleDateFormat(dateFormatStr);
this.supportedDateFormats.add(df);
}
catch (Throwable e)
{
// No good
throw new AlfrescoRuntimeException("Unable to set supported date format: " + dateFormatStr, e);
}
}
}
/**
* Set if the property mappings augment or override the mapping generically provided by the
* extracter implementation. The default is <tt>false</tt>, i.e. any mapping set completely
@@ -347,6 +413,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
*
* @see #setMappingProperties(Properties)
*/
@SuppressWarnings("unchecked")
protected Map<String, Set<QName>> readMappingProperties(Properties mappingProperties)
{
Map<String, String> namespacesByPrefix = new HashMap<String, String>(5);
@@ -562,6 +629,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
Map<String, Serializable> rawMetadata = extractRaw(reader);
// Convert to system properties (standalone)
Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
// Convert the properties according to the dictionary types
systemProperties = convertSystemPropertyValues(systemProperties);
// Now use the proper overwrite policy
changedProperties = overwritePolicy.applyProperties(systemProperties, destination);
}
@@ -628,6 +697,131 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
return systemProperties;
}
/**
* Converts all values according to their dictionary-defined type. This uses the
* {@link #setFailOnTypeConversion(boolean) failOnTypeConversion flag} to determine how failures
* are handled i.e. if values fail to convert, the process may discard the property.
*
* @param systemProperties the values keyed to system property names
* @return Returns a modified map of properties that have been converted.
*/
@SuppressWarnings("unchecked")
private Map<QName, Serializable> convertSystemPropertyValues(Map<QName, Serializable> systemProperties)
{
Map<QName, Serializable> convertedProperties = new HashMap<QName, Serializable>(systemProperties.size() + 7);
for (Map.Entry<QName, Serializable> entry : systemProperties.entrySet())
{
QName propertyQName = entry.getKey();
Serializable propertyValue = entry.getValue();
// Get the property definition
PropertyDefinition propertyDef = (dictionaryService == null) ? null : dictionaryService.getProperty(propertyQName);
if (propertyDef == null)
{
// There is nothing in the DD about this so just transfer it
convertedProperties.put(propertyQName, propertyValue);
continue;
}
// It is in the DD, so attempt the conversion
DataTypeDefinition propertyTypeDef = propertyDef.getDataType();
Serializable convertedPropertyValue = null;
try
{
// Attempt to make any date conversions
if (propertyTypeDef.getName().equals(DataTypeDefinition.DATE) || propertyTypeDef.getName().equals(DataTypeDefinition.DATETIME))
{
if (propertyValue instanceof Collection)
{
convertedPropertyValue = (Serializable) makeDates((Collection) propertyValue);
}
else if (propertyValue instanceof String)
{
convertedPropertyValue = makeDate((String) propertyValue);
}
}
else
{
if (propertyValue instanceof Collection)
{
convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
propertyTypeDef,
(Collection) propertyValue);
}
else
{
convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
propertyTypeDef,
propertyValue);
}
}
convertedProperties.put(propertyQName, convertedPropertyValue);
}
catch (TypeConversionException e)
{
// Do we just absorb this or is it a problem?
if (failOnTypeConversion)
{
throw AlfrescoRuntimeException.create(
e,
ERR_TYPE_CONVERSION,
this,
propertyQName,
propertyTypeDef.getName(),
propertyValue);
}
}
}
// Done
return convertedProperties;
}
/**
* Convert a collection of date <tt>String</tt>s to <tt>Date</tt> objects
*/
private Collection<Date> makeDates(Collection<String> dateStrs)
{
List<Date> dates = new ArrayList<Date>(dateStrs.size());
for (String dateStr : dateStrs)
{
Date date = makeDate(dateStr);
dates.add(date);
}
return dates;
}
/**
* Convert a date <tt>String</tt> to a <tt>Date</tt> object
*/
private Date makeDate(String dateStr)
{
Date date = null;
try
{
date = DefaultTypeConverter.INSTANCE.convert(Date.class, dateStr);
}
catch (TypeConversionException e)
{
// Try one of the other formats
for (DateFormat df : this.supportedDateFormats)
{
try
{
date = df.parse(dateStr);
}
catch (ParseException ee)
{
// Didn't work
}
}
if (date == null)
{
// Still no luck
throw new TypeConversionException("Unable to convert string to date: " + dateStr);
}
}
return date;
}
/**
* Adds a value to the map if it is non-trivial. A value is trivial if
* <ul>
@@ -646,6 +840,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
* @param destination the map to put values into
* @return Returns <tt>true</tt> if set, otherwise <tt>false</tt>
*/
@SuppressWarnings("unchecked")
protected boolean putRawValue(String key, Serializable value, Map<String, Serializable> destination)
{
if (value == null)

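The new makeDate(String) falls back from the default type converter to each SimpleDateFormat registered via setSupportedDateFormats. A minimal standalone sketch of that fallback pattern (the class name and format strings here are illustrative, not taken from the commit):

    import java.text.DateFormat;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Arrays;
    import java.util.Date;
    import java.util.List;

    public class DateFallbackSketch
    {
        public static Date parse(String dateStr, List<DateFormat> fallbacks)
        {
            for (DateFormat df : fallbacks)
            {
                try
                {
                    // First format that matches wins
                    return df.parse(dateStr);
                }
                catch (ParseException e)
                {
                    // Didn't match this pattern; try the next one
                }
            }
            throw new IllegalArgumentException("Unable to convert string to date: " + dateStr);
        }

        public static void main(String[] args)
        {
            List<DateFormat> formats = Arrays.<DateFormat>asList(
                    new SimpleDateFormat("yyyy-MM-dd"),
                    new SimpleDateFormat("dd/MM/yyyy"));
            System.out.println(parse("25/12/2007", formats));
        }
    }
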
View File

@@ -36,7 +36,9 @@ import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.TempFileProvider;
@@ -57,6 +59,7 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
protected static final String QUICK_CREATOR = "Nevin Nollop";
protected MimetypeMap mimetypeMap;
protected DictionaryService dictionaryService;
protected abstract MetadataExtracter getExtracter();
@@ -67,12 +70,13 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
public void setUp() throws Exception
{
this.mimetypeMap = (MimetypeMap) ctx.getBean("mimetypeService");
this.dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
// perform a little cleaning up
long now = System.currentTimeMillis();
TempFileProvider.TempFileCleanerJob.removeFiles(now);
}
/**
* Check that all objects are present
*/
@@ -123,9 +127,11 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
{
assertEquals(
"Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
QUICK_TITLE, properties.get(ContentModel.PROP_TITLE));
QUICK_TITLE,
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_TITLE)));
assertEquals(
"Property " + ContentModel.PROP_DESCRIPTION + " not found for mimetype " + mimetype,
QUICK_DESCRIPTION, properties.get(ContentModel.PROP_DESCRIPTION));
QUICK_DESCRIPTION,
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
}
}

View File

@@ -38,6 +38,7 @@ public class HtmlMetadataExtracterTest extends AbstractMetadataExtracterTest
{
super.setUp();
extracter = new HtmlMetadataExtracter();
extracter.setDictionaryService(dictionaryService);
extracter.register();
}

View File

@@ -15,6 +15,7 @@ public class OfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
{
super.setUp();
extracter = new OfficeMetadataExtracter();
extracter.setDictionaryService(dictionaryService);
extracter.register();
}

View File

@@ -15,6 +15,7 @@ public class OpenDocumentMetadataExtracterTest extends AbstractMetadataExtracter
{
super.setUp();
extracter = new OpenDocumentMetadataExtracter();
extracter.setDictionaryService(dictionaryService);
extracter.register();
}

View File

@@ -23,8 +23,6 @@
package org.alfresco.repo.content.metadata;
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;
/**
* @author Jesper Steen Møller
@@ -42,6 +40,7 @@ public class OpenOfficeMetadataExtracterTest extends AbstractMetadataExtracterTe
extracter = new OpenOfficeMetadataExtracter();
extracter.setMimetypeService(mimetypeMap);
extracter.setDictionaryService(dictionaryService);
extracter.setConnection(connection);
extracter.init();
}

View File

@@ -16,6 +16,7 @@ public class PdfBoxMetadataExtracterTest extends AbstractMetadataExtracterTest
{
super.setUp();
extracter = new PdfBoxMetadataExtracter();
extracter.setDictionaryService(dictionaryService);
extracter.register();
}

View File

@@ -70,6 +70,8 @@ import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.NameMatcher;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.remoting.rmi.RmiProxyFactoryBean;
/**
@@ -78,6 +80,8 @@ import org.springframework.remoting.rmi.RmiProxyFactoryBean;
*/
public class DeploymentServiceImpl implements DeploymentService
{
private static Log fgLogger = LogFactory.getLog(DeploymentServiceImpl.class);
/**
* Class to hold Deployment destination information.
* Used as a lock to serialize deployments to the same
@@ -122,6 +126,11 @@ public class DeploymentServiceImpl implements DeploymentService
{
return fHost.hashCode() + fPort;
}
public String toString()
{
return fHost;
}
};
/**
@@ -169,6 +178,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentDestination dest = getLock(hostName, port);
synchronized (dest)
{
if (fgLogger.isDebugEnabled())
{
fgLogger.debug("Deploying to Remote Alfresco at " + dest);
}
try
{
DeploymentReport report = new DeploymentReport();
@@ -178,6 +191,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
new Pair<Integer, String>(version, srcPath),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
for (DeploymentCallback callback : callbacks)
{
callback.eventOccurred(event);
@@ -229,6 +246,10 @@ public class DeploymentServiceImpl implements DeploymentService
new DeploymentEvent(DeploymentEvent.Type.COPIED,
new Pair<Integer, String>(version, srcPath),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -248,6 +269,10 @@ public class DeploymentServiceImpl implements DeploymentService
event = new DeploymentEvent(DeploymentEvent.Type.END,
new Pair<Integer, String>(version, srcPath),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
for (DeploymentCallback callback : callbacks)
{
callback.eventOccurred(event);
@@ -269,6 +294,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.END,
new Pair<Integer, String>(version, srcPath),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
for (DeploymentCallback callback : callbacks)
{
callback.eventOccurred(event);
@@ -381,6 +410,10 @@ public class DeploymentServiceImpl implements DeploymentService
new DeploymentEvent(DeploymentEvent.Type.DELETED,
source,
destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -428,6 +461,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
source,
destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -449,6 +486,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
source,
destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -482,6 +523,10 @@ public class DeploymentServiceImpl implements DeploymentService
String destination = dst.getPath();
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
source, destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -512,6 +557,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
source,
destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -536,6 +585,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
source,
destination);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
report.add(event);
if (callbacks != null)
{
@@ -798,6 +851,10 @@ public class DeploymentServiceImpl implements DeploymentService
boolean dontDelete, boolean dontDo,
List<DeploymentCallback> callbacks)
{
if (fgLogger.isDebugEnabled())
{
fgLogger.debug("Deploying To FileSystem Reciever on " + hostName + " to target " + target);
}
DeploymentReport report = new DeploymentReport();
DeploymentReceiverService service = null;
String ticket = null;
@@ -807,6 +864,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
new Pair<Integer, String>(version, srcPath),
target);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
if (callbacks != null)
{
for (DeploymentCallback callback : callbacks)
@@ -893,6 +954,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
newDstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
if (callbacks != null)
{
for (DeploymentCallback callback : callbacks)
@@ -915,7 +980,7 @@ public class DeploymentServiceImpl implements DeploymentService
src = null;
continue;
}
int diff = src.getName().compareTo(dst.getName());
int diff = src.getName().compareToIgnoreCase(dst.getName());
if (diff < 0)
{
if (!excluded(matcher, src.getPath(), null))
@@ -973,6 +1038,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
newDstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
if (callbacks != null)
{
for (DeploymentCallback callback : callbacks)
@@ -1008,6 +1077,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
new Pair<Integer, String>(version, src.getPath()),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
if (callbacks != null)
{
for (DeploymentCallback callback : callbacks)
@@ -1049,6 +1122,10 @@ public class DeploymentServiceImpl implements DeploymentService
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
new Pair<Integer, String>(version, src.getPath()),
dstPath);
if (fgLogger.isDebugEnabled())
{
fgLogger.debug(event);
}
if (callbacks != null)
{
for (DeploymentCallback callback : callbacks)

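The logging added throughout this class follows the usual commons-logging guard idiom: the message string (and the event's toString()) is only built when debug output is actually enabled. A minimal sketch (class and method names are illustrative):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class DebugGuardSketch
    {
        private static final Log logger = LogFactory.getLog(DebugGuardSketch.class);

        public void deploy(String host, String target)
        {
            if (logger.isDebugEnabled())
            {
                // The concatenation only happens when debug logging is on
                logger.debug("Deploying to " + host + " with target " + target);
            }
        }
    }
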
View File

@@ -480,6 +480,11 @@ public interface NodeService
/**
* Get the node with the given name within the context of the parent node. The name
* is case-insensitive as Alfresco has to support case-insensitive clients as standard.
* <p>
* This method only works for child associations that do not allow duplicate child names;
* see <b>cm:folder</b> and the <b>duplicate</b> tag. Child associations without this restriction
* allow duplicate child names, so multiple children with the same name may be stored against
* the given association type.
*
* @param nodeRef the parent node - usually a <b>container</b>
* @param assocTypeQName the type of the association