mirror of
https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-06-16 17:55:15 +00:00
Merged V2.2 to HEAD
8078: Merged V2.1 to V2.2 8025: Fixes WCM-1039, problems with case insensitive name handling. 8079: Merged V2.1 to V2.2 8035: -- DONE SEPARATELY -- 8040: Fix AR-1985: SQL Server dialect is derived from Sybase dialect there need additional no-op script 8046: Better Javadocs for getChildByName() 8056: Fixed WCM-790: Date conversion for metadata extractors 8057: Fixed WCM-790: Properties that don't convert can be discarded (default is to fail) 8059: -- DONE SEPARATELY -- 8061: Fixes WCM-790: Fallout from CHK-2168 and CHK-2169 8081: Fix for WCM-1018 8082: Merged V2.1 to V2.2 8016: Merged V2.1-A to V2.1 8000: Additional indexes for AVM 8013: Patch to introduce reverse indexes required for AVM git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8474 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
parent
f22c4c4e57
commit
72a90a14f1
@ -145,6 +145,9 @@
|
|||||||
<property name="mimetypeService">
|
<property name="mimetypeService">
|
||||||
<ref bean="mimetypeService" />
|
<ref bean="mimetypeService" />
|
||||||
</property>
|
</property>
|
||||||
|
<property name="dictionaryService">
|
||||||
|
<ref bean="dictionaryService" />
|
||||||
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Content Metadata Extracters -->
|
<!-- Content Metadata Extracters -->
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
|
|
||||||
|
|
||||||
<!-- load common properties -->
|
<!-- load common properties -->
|
||||||
<bean id="repository-properties"
|
<bean id="repository-properties"
|
||||||
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
|
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
|
||||||
<property name="ignoreUnresolvablePlaceholders">
|
<property name="ignoreUnresolvablePlaceholders">
|
||||||
<value>true</value>
|
<value>true</value>
|
||||||
@ -24,7 +24,7 @@
|
|||||||
<value>classpath:alfresco/repository.properties</value>
|
<value>classpath:alfresco/repository.properties</value>
|
||||||
<value>classpath:alfresco/version.properties</value>
|
<value>classpath:alfresco/version.properties</value>
|
||||||
<value>classpath:alfresco/domain/transaction.properties</value>
|
<value>classpath:alfresco/domain/transaction.properties</value>
|
||||||
<value>classpath:alfresco/jndi.properties</value>
|
<!-- <value>classpath:alfresco/jndi.properties</value> -->
|
||||||
</list>
|
</list>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
@ -38,7 +38,7 @@
|
|||||||
<!-- "repository-properties" in a dev-context.xml -->
|
<!-- "repository-properties" in a dev-context.xml -->
|
||||||
<!-- or custom-db-and-data-context.xml file. -->
|
<!-- or custom-db-and-data-context.xml file. -->
|
||||||
|
|
||||||
<bean id="shared-properties"
|
<bean id="shared-properties"
|
||||||
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
|
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
|
||||||
<property name="ignoreUnresolvablePlaceholders">
|
<property name="ignoreUnresolvablePlaceholders">
|
||||||
<value>true</value>
|
<value>true</value>
|
||||||
@ -55,17 +55,17 @@
|
|||||||
<!-- Custom MBeanServer -->
|
<!-- Custom MBeanServer -->
|
||||||
<bean id="alfrescoMBeanServer" class="org.springframework.jmx.support.MBeanServerFactoryBean"/>
|
<bean id="alfrescoMBeanServer" class="org.springframework.jmx.support.MBeanServerFactoryBean"/>
|
||||||
|
|
||||||
<bean id="registry"
|
<bean id="registry"
|
||||||
class="org.springframework.remoting.rmi.RmiRegistryFactoryBean"
|
class="org.springframework.remoting.rmi.RmiRegistryFactoryBean"
|
||||||
lazy-init="true">
|
lazy-init="true">
|
||||||
<property name="port" value="${alfresco.rmi.services.port}"/>
|
<property name="port" value="${alfresco.rmi.services.port}"/>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- MBeanServer Connector (registers itself with custom alfrescoMBeanServer) -->
|
<!-- MBeanServer Connector (registers itself with custom alfrescoMBeanServer) -->
|
||||||
<bean id="serverConnector"
|
<bean id="serverConnector"
|
||||||
class="org.springframework.jmx.support.ConnectorServerFactoryBean"
|
class="org.springframework.jmx.support.ConnectorServerFactoryBean"
|
||||||
depends-on="registry"
|
depends-on="registry"
|
||||||
lazy-init="true">
|
lazy-init="true">
|
||||||
|
|
||||||
<property name="server" ref="alfrescoMBeanServer"/>
|
<property name="server" ref="alfrescoMBeanServer"/>
|
||||||
<property name="objectName" value="connector:name=rmi"/>
|
<property name="objectName" value="connector:name=rmi"/>
|
||||||
@ -94,7 +94,7 @@
|
|||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- MBeans registered with alfrescoMBeanServer -->
|
<!-- MBeans registered with alfrescoMBeanServer -->
|
||||||
<bean id="VirtServerRegistry"
|
<bean id="VirtServerRegistry"
|
||||||
class="org.alfresco.mbeans.VirtServerRegistry"
|
class="org.alfresco.mbeans.VirtServerRegistry"
|
||||||
init-method="initialize" >
|
init-method="initialize" >
|
||||||
|
|
||||||
@ -113,10 +113,10 @@
|
|||||||
<!-- the servers run have read access to the password files -->
|
<!-- the servers run have read access to the password files -->
|
||||||
<!-- (e.g.: under Unix, chmod them to "400"). -->
|
<!-- (e.g.: under Unix, chmod them to "400"). -->
|
||||||
|
|
||||||
<property name="passwordFile"
|
<property name="passwordFile"
|
||||||
value="${alfresco.jmx.dir}/alfresco-jmxrmi.password"/>
|
value="${alfresco.jmx.dir}/alfresco-jmxrmi.password"/>
|
||||||
|
|
||||||
<property name="accessFile"
|
<property name="accessFile"
|
||||||
value="${alfresco.jmx.dir}/alfresco-jmxrmi.access"/>
|
value="${alfresco.jmx.dir}/alfresco-jmxrmi.access"/>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
@ -189,7 +189,7 @@
|
|||||||
<value>false</value>
|
<value>false</value>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Characterset decoder -->
|
<!-- Characterset decoder -->
|
||||||
<bean id="charset.finder" class="org.alfresco.repo.content.encoding.ContentCharsetFinder">
|
<bean id="charset.finder" class="org.alfresco.repo.content.encoding.ContentCharsetFinder">
|
||||||
<property name="defaultCharset">
|
<property name="defaultCharset">
|
||||||
@ -218,7 +218,7 @@
|
|||||||
<value>${server.transaction.allow-writes}</value>
|
<value>${server.transaction.allow-writes}</value>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="retryingTransactionHelper" class="org.alfresco.repo.transaction.RetryingTransactionHelper">
|
<bean id="retryingTransactionHelper" class="org.alfresco.repo.transaction.RetryingTransactionHelper">
|
||||||
<property name="transactionService">
|
<property name="transactionService">
|
||||||
<ref bean="transactionService"/>
|
<ref bean="transactionService"/>
|
||||||
@ -236,7 +236,7 @@
|
|||||||
<value>false</value>
|
<value>false</value>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="readOnlyTransactionDefinition" class="org.springframework.transaction.support.DefaultTransactionDefinition">
|
<bean id="readOnlyTransactionDefinition" class="org.springframework.transaction.support.DefaultTransactionDefinition">
|
||||||
<property name="propagationBehaviorName">
|
<property name="propagationBehaviorName">
|
||||||
<value>PROPAGATION_REQUIRED</value>
|
<value>PROPAGATION_REQUIRED</value>
|
||||||
@ -354,7 +354,7 @@
|
|||||||
<ref bean="indexerAndSearcherFactory" />
|
<ref bean="indexerAndSearcherFactory" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="admSearchService" class="org.alfresco.repo.search.SearcherComponent">
|
<bean id="admSearchService" class="org.alfresco.repo.search.SearcherComponent">
|
||||||
<property name="indexerAndSearcherFactory">
|
<property name="indexerAndSearcherFactory">
|
||||||
<ref bean="admLuceneIndexerAndSearcherFactory" />
|
<ref bean="admLuceneIndexerAndSearcherFactory" />
|
||||||
@ -417,7 +417,7 @@
|
|||||||
<value>5</value>
|
<value>5</value>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
|
|
||||||
<!-- Indexer and searchers for lucene -->
|
<!-- Indexer and searchers for lucene -->
|
||||||
<bean id="admLuceneIndexerAndSearcherFactory"
|
<bean id="admLuceneIndexerAndSearcherFactory"
|
||||||
@ -477,7 +477,7 @@
|
|||||||
<ref bean="indexThreadPoolExecutor"></ref>
|
<ref bean="indexThreadPoolExecutor"></ref>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Indexer and searchers for lucene -->
|
<!-- Indexer and searchers for lucene -->
|
||||||
<bean id="avmLuceneIndexerAndSearcherFactory"
|
<bean id="avmLuceneIndexerAndSearcherFactory"
|
||||||
class="org.alfresco.repo.search.impl.lucene.AVMLuceneIndexerAndSearcherFactory">
|
class="org.alfresco.repo.search.impl.lucene.AVMLuceneIndexerAndSearcherFactory">
|
||||||
@ -545,7 +545,7 @@
|
|||||||
<ref bean="indexThreadPoolExecutor"></ref>
|
<ref bean="indexThreadPoolExecutor"></ref>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
|
|
||||||
<!-- Indexer and searchers for lucene -->
|
<!-- Indexer and searchers for lucene -->
|
||||||
<bean id="luceneCategoryService" class="org.alfresco.repo.search.impl.lucene.LuceneCategoryServiceImpl">
|
<bean id="luceneCategoryService" class="org.alfresco.repo.search.impl.lucene.LuceneCategoryServiceImpl">
|
||||||
@ -666,7 +666,6 @@
|
|||||||
<!-- -->
|
<!-- -->
|
||||||
|
|
||||||
<bean id="namespaceDAO" class="org.alfresco.repo.dictionary.NamespaceDAOImpl">
|
<bean id="namespaceDAO" class="org.alfresco.repo.dictionary.NamespaceDAOImpl">
|
||||||
|
|
||||||
<property name="tenantService">
|
<property name="tenantService">
|
||||||
<ref bean="tenantService"/>
|
<ref bean="tenantService"/>
|
||||||
</property>
|
</property>
|
||||||
@ -676,7 +675,21 @@
|
|||||||
<property name="prefixesCache">
|
<property name="prefixesCache">
|
||||||
<ref bean="prefixesCache"/>
|
<ref bean="prefixesCache"/>
|
||||||
</property>
|
</property>
|
||||||
|
</bean>
|
||||||
|
|
||||||
|
<bean id="dictionaryModelType" class="org.alfresco.repo.dictionary.DictionaryModelType" init-method="init">
|
||||||
|
<property name="dictionaryDAO">
|
||||||
|
<ref bean="dictionaryDAO" />
|
||||||
|
</property>
|
||||||
|
<property name="namespaceDAO">
|
||||||
|
<ref bean="namespaceDAO" />
|
||||||
|
</property>
|
||||||
|
<property name="nodeService">
|
||||||
|
<ref bean="nodeService"/>
|
||||||
|
</property>
|
||||||
|
<property name="contentService">
|
||||||
|
<ref bean="contentService"/>
|
||||||
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="dictionaryDAO" class="org.alfresco.repo.dictionary.DictionaryDAOImpl">
|
<bean id="dictionaryDAO" class="org.alfresco.repo.dictionary.DictionaryDAOImpl">
|
||||||
@ -731,12 +744,12 @@
|
|||||||
<!-- Content models -->
|
<!-- Content models -->
|
||||||
<value>alfresco/model/applicationModel.xml</value>
|
<value>alfresco/model/applicationModel.xml</value>
|
||||||
<value>alfresco/model/wcmAppModel.xml</value>
|
<value>alfresco/model/wcmAppModel.xml</value>
|
||||||
|
|
||||||
<!-- Implementation models -->
|
<!-- Implementation models -->
|
||||||
<value>org/alfresco/repo/action/actionModel.xml</value>
|
<value>org/alfresco/repo/action/actionModel.xml</value>
|
||||||
<value>org/alfresco/repo/rule/ruleModel.xml</value>
|
<value>org/alfresco/repo/rule/ruleModel.xml</value>
|
||||||
<value>org/alfresco/repo/version/version_model.xml</value>
|
<value>org/alfresco/repo/version/version_model.xml</value>
|
||||||
|
|
||||||
<!-- Email model -->
|
<!-- Email model -->
|
||||||
<value>alfresco/model/emailServerModel.xml</value>
|
<value>alfresco/model/emailServerModel.xml</value>
|
||||||
|
|
||||||
@ -771,6 +784,24 @@
|
|||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
|
<bean id="dictionaryRepositoryBootstrap" class="org.alfresco.repo.dictionary.DictionaryRepositoryBootstrap" init-method="bootstrap">
|
||||||
|
<property name="dictionaryDAO">
|
||||||
|
<ref local="dictionaryDAO"/>
|
||||||
|
</property>
|
||||||
|
<property name="contentService">
|
||||||
|
<ref bean="contentService"/>
|
||||||
|
</property>
|
||||||
|
<property name="searchService">
|
||||||
|
<ref bean="searchService"/>
|
||||||
|
</property>
|
||||||
|
<property name="transactionService">
|
||||||
|
<ref bean="transactionService"/>
|
||||||
|
</property>
|
||||||
|
<property name="authenticationComponent">
|
||||||
|
<ref bean="authenticationComponent"/>
|
||||||
|
</property>
|
||||||
|
</bean>
|
||||||
|
|
||||||
<!-- -->
|
<!-- -->
|
||||||
<!-- Copy Service -->
|
<!-- Copy Service -->
|
||||||
<!-- -->
|
<!-- -->
|
||||||
@ -843,7 +874,7 @@
|
|||||||
<ref bean="LockService" />
|
<ref bean="LockService" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="discussableAspect" class="org.alfresco.repo.forum.DiscussableAspect" init-method="init">
|
<bean id="discussableAspect" class="org.alfresco.repo.forum.DiscussableAspect" init-method="init">
|
||||||
<property name="policyComponent">
|
<property name="policyComponent">
|
||||||
<ref bean="policyComponent" />
|
<ref bean="policyComponent" />
|
||||||
@ -875,13 +906,13 @@
|
|||||||
</props>
|
</props>
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<bean id="luceneFullTextSearchIndexer" class="org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexerImpl">
|
<bean id="luceneFullTextSearchIndexer" class="org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexerImpl">
|
||||||
<property name="indexerAndSearcherFactory">
|
<property name="indexerAndSearcherFactory">
|
||||||
<ref bean="indexerAndSearcherFactory" />
|
<ref bean="indexerAndSearcherFactory" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Bean to backup Lucene indexes -->
|
<!-- Bean to backup Lucene indexes -->
|
||||||
<bean id="luceneIndexBackupComponent"
|
<bean id="luceneIndexBackupComponent"
|
||||||
class="org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerAndSearcherFactory$LuceneIndexBackupComponent">
|
class="org.alfresco.repo.search.impl.lucene.AbstractLuceneIndexerAndSearcherFactory$LuceneIndexBackupComponent">
|
||||||
@ -904,7 +935,7 @@
|
|||||||
<!-- -->
|
<!-- -->
|
||||||
<!-- Thread Pool -->
|
<!-- Thread Pool -->
|
||||||
<!-- -->
|
<!-- -->
|
||||||
|
|
||||||
<bean id="threadPoolExecutor" class="org.alfresco.util.ThreadPoolExecutorFactoryBean" singleton="true">
|
<bean id="threadPoolExecutor" class="org.alfresco.util.ThreadPoolExecutorFactoryBean" singleton="true">
|
||||||
<property name="corePoolSize">
|
<property name="corePoolSize">
|
||||||
<value>2</value>
|
<value>2</value>
|
||||||
@ -951,7 +982,7 @@
|
|||||||
<ref bean="multilingualContentService" />
|
<ref bean="multilingualContentService" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Empty translation -->
|
<!-- Empty translation -->
|
||||||
<bean id="emptyTranslationAspect" class="org.alfresco.repo.model.ml.EmptyTranslationAspect" init-method="init">
|
<bean id="emptyTranslationAspect" class="org.alfresco.repo.model.ml.EmptyTranslationAspect" init-method="init">
|
||||||
<property name="policyComponent">
|
<property name="policyComponent">
|
||||||
@ -981,7 +1012,7 @@
|
|||||||
<ref bean="policyComponent" />
|
<ref bean="policyComponent" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Temporary Aspect -->
|
<!-- Temporary Aspect -->
|
||||||
<bean id="temporaryAspect" class="org.alfresco.repo.node.TemporaryAspect" init-method="init">
|
<bean id="temporaryAspect" class="org.alfresco.repo.node.TemporaryAspect" init-method="init">
|
||||||
<property name="policyComponent">
|
<property name="policyComponent">
|
||||||
@ -995,7 +1026,7 @@
|
|||||||
<ref bean="nodeService" />
|
<ref bean="nodeService" />
|
||||||
</property>
|
</property>
|
||||||
</bean>
|
</bean>
|
||||||
|
|
||||||
<!-- Registry service -->
|
<!-- Registry service -->
|
||||||
<bean id="registryService" class="org.alfresco.repo.admin.registry.RegistryServiceImpl" init-method="init">
|
<bean id="registryService" class="org.alfresco.repo.admin.registry.RegistryServiceImpl" init-method="init">
|
||||||
<property name="authenticationComponent">
|
<property name="authenticationComponent">
|
||||||
|
@ -26,3 +26,7 @@ ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) R
|
|||||||
|
|
||||||
CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
|
CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
|
||||||
ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
|
ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
|
||||||
|
|
||||||
|
CREATE INDEX idx_avm_hl_revpk ON avm_history_links (descendent, ancestor);
|
||||||
|
|
||||||
|
CREATE INDEX idx_avm_vr_revuq ON avm_version_roots (avm_store_id, version_id);
|
||||||
|
@ -0,0 +1,21 @@
|
|||||||
|
--
|
||||||
|
-- Title: Add text columns that allow null
|
||||||
|
-- Database: SQL Server
|
||||||
|
-- Since: V2.1 Schema 64
|
||||||
|
-- Author: Derek Hulley
|
||||||
|
--
|
||||||
|
-- Please contact support@alfresco.com if you need assistance with the upgrade.
|
||||||
|
--
|
||||||
|
-- This is a Sybase issue, so nothing is required here.
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Record script finish
|
||||||
|
--
|
||||||
|
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.1-NotNullColumns';
|
||||||
|
INSERT INTO alf_applied_patch
|
||||||
|
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
|
||||||
|
VALUES
|
||||||
|
(
|
||||||
|
'patch.db-V2.1-NotNullColumns', 'Manually executed script upgrade V2.1: Add nullable columns',
|
||||||
|
0, 63, -1, 64, null, 'UNKOWN', 1, 1, 'Script completed'
|
||||||
|
);
|
@ -61,6 +61,10 @@ ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) R
|
|||||||
CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
|
CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
|
||||||
ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
|
ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
|
||||||
|
|
||||||
|
CREATE INDEX idx_avm_hl_revpk ON avm_history_links (descendent, ancestor);
|
||||||
|
|
||||||
|
CREATE INDEX idx_avm_vr_revuq ON avm_version_roots (avm_store_id, version_id);
|
||||||
|
|
||||||
--
|
--
|
||||||
-- Record script finish
|
-- Record script finish
|
||||||
--
|
--
|
||||||
|
@ -13,3 +13,5 @@ content.http_reader.err.no_connection=Unable to connect to remote Alfresco serve
|
|||||||
content.http_reader.err.no_authentication=The HTTP reader was unable to authenticate on the remote server: {0} \n
|
content.http_reader.err.no_authentication=The HTTP reader was unable to authenticate on the remote server: {0} \n
|
||||||
content.http_reader.err.check_cluster=Please ensure that 'replicateUpdates' and 'replicateUpdatesViaCopy' is enabled for the cache 'org.alfresco.cache.ticketsCache'. Check that the general cluster configuration is correct and working.
|
content.http_reader.err.check_cluster=Please ensure that 'replicateUpdates' and 'replicateUpdatesViaCopy' is enabled for the cache 'org.alfresco.cache.ticketsCache'. Check that the general cluster configuration is correct and working.
|
||||||
content.http_reader.err.unrecognized=An unrecognized error occured when attempting to download content from remote server:\n Server: {0} \n Content: {1} \n HTTP Response: {2}
|
content.http_reader.err.unrecognized=An unrecognized error occured when attempting to download content from remote server:\n Server: {0} \n Content: {1} \n HTTP Response: {2}
|
||||||
|
|
||||||
|
metadata.extraction.err.type_conversion=Metadata extraction failed because an extracted value failed to convert to the required type: \n Extractor: {0} \n Target Property QName: {1} \n Required Type: {2} \n Extracted Value: {3}
|
@ -68,6 +68,7 @@ import org.alfresco.service.cmr.avm.AVMService;
|
|||||||
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
|
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
|
||||||
import org.alfresco.service.cmr.avm.LayeringDescriptor;
|
import org.alfresco.service.cmr.avm.LayeringDescriptor;
|
||||||
import org.alfresco.service.cmr.avm.VersionDescriptor;
|
import org.alfresco.service.cmr.avm.VersionDescriptor;
|
||||||
|
import org.alfresco.service.cmr.avm.deploy.DeploymentEvent;
|
||||||
import org.alfresco.service.cmr.avm.deploy.DeploymentReport;
|
import org.alfresco.service.cmr.avm.deploy.DeploymentReport;
|
||||||
import org.alfresco.service.cmr.avm.deploy.DeploymentService;
|
import org.alfresco.service.cmr.avm.deploy.DeploymentService;
|
||||||
import org.alfresco.service.cmr.avmsync.AVMDifference;
|
import org.alfresco.service.cmr.avmsync.AVMDifference;
|
||||||
@ -96,7 +97,7 @@ import org.alfresco.util.Pair;
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Big test of AVM behavior.
|
* Big test of AVM behavior.
|
||||||
*
|
*
|
||||||
* @author britt
|
* @author britt
|
||||||
*/
|
*/
|
||||||
public class AVMServiceTest extends AVMServiceTestBase
|
public class AVMServiceTest extends AVMServiceTestBase
|
||||||
@ -612,6 +613,15 @@ public class AVMServiceTest extends AVMServiceTestBase
|
|||||||
runQueriesForCreateAndDeploy("target");
|
runQueriesForCreateAndDeploy("target");
|
||||||
assertEquals(fService.lookup(-1, "main:/a/b/biz").getGuid(), fService.lookup(-1, "target:/a/b/biz").getGuid());
|
assertEquals(fService.lookup(-1, "main:/a/b/biz").getGuid(), fService.lookup(-1, "target:/a/b/biz").getGuid());
|
||||||
fService.removeNode("main:/a/b/c/foo");
|
fService.removeNode("main:/a/b/c/foo");
|
||||||
|
fService.createFile("main:/a/b/c", "Foo").close();
|
||||||
|
ContentWriter writer = fService.getContentWriter("main:/a/b/c/Foo");
|
||||||
|
writer.setEncoding("UTF-8");
|
||||||
|
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
|
||||||
|
writer.putContent("I am main:/a/b/c/foo");
|
||||||
|
report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, false, false, null);
|
||||||
|
System.out.println(report);
|
||||||
|
assertEquals(DeploymentEvent.Type.UPDATED, report.iterator().next().getType());
|
||||||
|
fService.removeNode("main:/a/b/c/foo");
|
||||||
report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, true, false, null);
|
report = depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target:/a", matcher, false, true, false, null);
|
||||||
runQueriesForCreateAndDeploy("target");
|
runQueriesForCreateAndDeploy("target");
|
||||||
System.out.println(report);
|
System.out.println(report);
|
||||||
@ -5531,7 +5541,7 @@ public class AVMServiceTest extends AVMServiceTestBase
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Test async indexing.
|
* Test async indexing.
|
||||||
*
|
*
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
public void testAsyncIndex() throws Exception
|
public void testAsyncIndex() throws Exception
|
||||||
|
@ -695,7 +695,7 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
|||||||
private SortedMap<String, AVMNodeDescriptor>
|
private SortedMap<String, AVMNodeDescriptor>
|
||||||
translateListing(Map<String, AVMNode> listing, Lookup lPath)
|
translateListing(Map<String, AVMNode> listing, Lookup lPath)
|
||||||
{
|
{
|
||||||
SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>();
|
SortedMap<String, AVMNodeDescriptor> results = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
|
||||||
for (String name : listing.keySet())
|
for (String name : listing.keySet())
|
||||||
{
|
{
|
||||||
// TODO consider doing this at a lower level.
|
// TODO consider doing this at a lower level.
|
||||||
|
@ -778,7 +778,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
// layer and underlying must match for flattening to be useful.
|
// layer and underlying must match for flattening to be useful.
|
||||||
if (!layer.getIndirection().equals(underlying.getPath()))
|
if (!layer.getIndirection().equalsIgnoreCase(underlying.getPath()))
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -514,7 +514,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
|||||||
public SortedMap<String, AVMNodeDescriptor> getListingDirect(AVMNodeDescriptor dir, boolean includeDeleted)
|
public SortedMap<String, AVMNodeDescriptor> getListingDirect(AVMNodeDescriptor dir, boolean includeDeleted)
|
||||||
{
|
{
|
||||||
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
|
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
|
||||||
SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>();
|
SortedMap<String, AVMNodeDescriptor> listing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
|
||||||
for (ChildEntry child : children)
|
for (ChildEntry child : children)
|
||||||
{
|
{
|
||||||
AVMNode childNode = child.getChild();
|
AVMNode childNode = child.getChild();
|
||||||
@ -551,7 +551,7 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
|||||||
{
|
{
|
||||||
throw new AVMBadArgumentException("Illegal null argument.");
|
throw new AVMBadArgumentException("Illegal null argument.");
|
||||||
}
|
}
|
||||||
SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>();
|
SortedMap<String, AVMNodeDescriptor> baseListing = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
|
||||||
// If we are not opaque, get the underlying base listing.
|
// If we are not opaque, get the underlying base listing.
|
||||||
if (!fOpacity)
|
if (!fOpacity)
|
||||||
{
|
{
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
package org.alfresco.repo.avm;
|
package org.alfresco.repo.avm;
|
||||||
|
|
||||||
@ -19,22 +19,22 @@ public class LookupKey implements Serializable
|
|||||||
* The name of the store.
|
* The name of the store.
|
||||||
*/
|
*/
|
||||||
private String fStoreName;
|
private String fStoreName;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The path being looked up.
|
* The path being looked up.
|
||||||
*/
|
*/
|
||||||
private SimplePath fPath;
|
private SimplePath fPath;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The version being looked up.
|
* The version being looked up.
|
||||||
*/
|
*/
|
||||||
private int fVersion;
|
private int fVersion;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Whether the lookup is a write lookup.
|
* Whether the lookup is a write lookup.
|
||||||
*/
|
*/
|
||||||
private boolean fWrite;
|
private boolean fWrite;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Whether the lookup includes deleted nodes.
|
* Whether the lookup includes deleted nodes.
|
||||||
*/
|
*/
|
||||||
@ -48,7 +48,7 @@ public class LookupKey implements Serializable
|
|||||||
* @param write Whether this is a write lookup.
|
* @param write Whether this is a write lookup.
|
||||||
* @param includeDeleted Whether this lookup should include deleted items.
|
* @param includeDeleted Whether this lookup should include deleted items.
|
||||||
*/
|
*/
|
||||||
public LookupKey(int version,
|
public LookupKey(int version,
|
||||||
SimplePath path,
|
SimplePath path,
|
||||||
String storeName,
|
String storeName,
|
||||||
boolean write,
|
boolean write,
|
||||||
@ -60,7 +60,7 @@ public class LookupKey implements Serializable
|
|||||||
fWrite = write;
|
fWrite = write;
|
||||||
fIncludeDeleted = includeDeleted;
|
fIncludeDeleted = includeDeleted;
|
||||||
}
|
}
|
||||||
|
|
||||||
public LookupKey(LookupKey other)
|
public LookupKey(LookupKey other)
|
||||||
{
|
{
|
||||||
fVersion = other.fVersion;
|
fVersion = other.fVersion;
|
||||||
@ -69,7 +69,7 @@ public class LookupKey implements Serializable
|
|||||||
fWrite = other.fWrite;
|
fWrite = other.fWrite;
|
||||||
fIncludeDeleted = other.fIncludeDeleted;
|
fIncludeDeleted = other.fIncludeDeleted;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set the writeness of this key.
|
* Set the writeness of this key.
|
||||||
*/
|
*/
|
||||||
@ -86,7 +86,7 @@ public class LookupKey implements Serializable
|
|||||||
{
|
{
|
||||||
return fStoreName;
|
return fStoreName;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is this a write lookup.
|
* Is this a write lookup.
|
||||||
* @return Whether this is a write lookup.
|
* @return Whether this is a write lookup.
|
||||||
@ -95,12 +95,12 @@ public class LookupKey implements Serializable
|
|||||||
{
|
{
|
||||||
return fWrite;
|
return fWrite;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* (non-Javadoc)
|
/* (non-Javadoc)
|
||||||
* @see java.lang.Object#equals(java.lang.Object)
|
* @see java.lang.Object#equals(java.lang.Object)
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object obj)
|
public boolean equals(Object obj)
|
||||||
{
|
{
|
||||||
if (this == obj)
|
if (this == obj)
|
||||||
{
|
{
|
||||||
@ -111,7 +111,7 @@ public class LookupKey implements Serializable
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
LookupKey o = (LookupKey)obj;
|
LookupKey o = (LookupKey)obj;
|
||||||
return fStoreName.equals(o.fStoreName) &&
|
return fStoreName.equalsIgnoreCase(o.fStoreName) &&
|
||||||
fVersion == o.fVersion &&
|
fVersion == o.fVersion &&
|
||||||
fPath.equals(o.fPath) &&
|
fPath.equals(o.fPath) &&
|
||||||
fWrite == o.fWrite &&
|
fWrite == o.fWrite &&
|
||||||
@ -122,9 +122,9 @@ public class LookupKey implements Serializable
|
|||||||
* @see java.lang.Object#hashCode()
|
* @see java.lang.Object#hashCode()
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public int hashCode()
|
public int hashCode()
|
||||||
{
|
{
|
||||||
int hash = fStoreName.hashCode();
|
int hash = fStoreName.toLowerCase().hashCode();
|
||||||
hash += fPath.hashCode();
|
hash += fPath.hashCode();
|
||||||
hash += fVersion;
|
hash += fVersion;
|
||||||
hash += fWrite ? 1 : 0;
|
hash += fWrite ? 1 : 0;
|
||||||
@ -136,8 +136,8 @@ public class LookupKey implements Serializable
|
|||||||
* @see java.lang.Object#toString()
|
* @see java.lang.Object#toString()
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public String toString()
|
public String toString()
|
||||||
{
|
{
|
||||||
return fStoreName + ":" + fPath + "-" + fVersion + "-" + fWrite + "-" + fIncludeDeleted;
|
return fStoreName + ":" + fPath + "-" + fVersion + "-" + fWrite + "-" + fIncludeDeleted;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -163,7 +163,7 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
|
|||||||
{
|
{
|
||||||
throw new AVMBadArgumentException("Path is null.");
|
throw new AVMBadArgumentException("Path is null.");
|
||||||
}
|
}
|
||||||
SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>();
|
SortedMap<String, AVMNodeDescriptor> result = new TreeMap<String, AVMNodeDescriptor>(String.CASE_INSENSITIVE_ORDER);
|
||||||
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
|
List<ChildEntry> children = AVMDAOs.Instance().fChildEntryDAO.getByParent(this);
|
||||||
for (ChildEntry child : children)
|
for (ChildEntry child : children)
|
||||||
{
|
{
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
package org.alfresco.repo.avm.util;
|
package org.alfresco.repo.avm.util;
|
||||||
|
|
||||||
@ -9,7 +9,7 @@ import java.io.Serializable;
|
|||||||
* Holds a simple path.
|
* Holds a simple path.
|
||||||
* @author britt
|
* @author britt
|
||||||
*/
|
*/
|
||||||
public class SimplePath implements Serializable
|
public class SimplePath implements Serializable
|
||||||
{
|
{
|
||||||
private static final long serialVersionUID = 2696828491008988470L;
|
private static final long serialVersionUID = 2696828491008988470L;
|
||||||
|
|
||||||
@ -17,7 +17,7 @@ public class SimplePath implements Serializable
|
|||||||
* The names of the path's components.
|
* The names of the path's components.
|
||||||
*/
|
*/
|
||||||
private String [] fNames;
|
private String [] fNames;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Construct a new one from a string.
|
* Construct a new one from a string.
|
||||||
* @param path The String representation of the path.
|
* @param path The String representation of the path.
|
||||||
@ -39,7 +39,7 @@ public class SimplePath implements Serializable
|
|||||||
}
|
}
|
||||||
fNames = path.split("/+");
|
fNames = path.split("/+");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the component name at index.
|
* Get the component name at index.
|
||||||
* @param index The index of the component to get.
|
* @param index The index of the component to get.
|
||||||
@ -49,7 +49,7 @@ public class SimplePath implements Serializable
|
|||||||
{
|
{
|
||||||
return fNames[index];
|
return fNames[index];
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the number of components in this path.
|
* Get the number of components in this path.
|
||||||
* @return The number of components.
|
* @return The number of components.
|
||||||
@ -63,7 +63,7 @@ public class SimplePath implements Serializable
|
|||||||
* @see java.lang.Object#equals(java.lang.Object)
|
* @see java.lang.Object#equals(java.lang.Object)
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object obj)
|
public boolean equals(Object obj)
|
||||||
{
|
{
|
||||||
if (this == obj)
|
if (this == obj)
|
||||||
{
|
{
|
||||||
@ -80,7 +80,7 @@ public class SimplePath implements Serializable
|
|||||||
}
|
}
|
||||||
for (int i = 0; i < fNames.length; i++)
|
for (int i = 0; i < fNames.length; i++)
|
||||||
{
|
{
|
||||||
if (!fNames[i].equals(o.fNames[i]))
|
if (!fNames[i].equalsIgnoreCase(o.fNames[i]))
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -92,12 +92,12 @@ public class SimplePath implements Serializable
|
|||||||
* @see java.lang.Object#hashCode()
|
* @see java.lang.Object#hashCode()
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public int hashCode()
|
public int hashCode()
|
||||||
{
|
{
|
||||||
int hash = 0;
|
int hash = 0;
|
||||||
for (String name : fNames)
|
for (String name : fNames)
|
||||||
{
|
{
|
||||||
hash += name.hashCode();
|
hash += name.toLowerCase().hashCode();
|
||||||
}
|
}
|
||||||
return hash;
|
return hash;
|
||||||
}
|
}
|
||||||
@ -106,7 +106,7 @@ public class SimplePath implements Serializable
|
|||||||
* @see java.lang.Object#toString()
|
* @see java.lang.Object#toString()
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public String toString()
|
public String toString()
|
||||||
{
|
{
|
||||||
StringBuilder builder = new StringBuilder();
|
StringBuilder builder = new StringBuilder();
|
||||||
for (String name : fNames)
|
for (String name : fNames)
|
||||||
|
@ -27,21 +27,33 @@ package org.alfresco.repo.content.metadata;
|
|||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
import java.lang.reflect.Array;
|
import java.lang.reflect.Array;
|
||||||
|
import java.text.DateFormat;
|
||||||
|
import java.text.ParseException;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.ArrayList;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
import java.util.Date;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.StringTokenizer;
|
import java.util.StringTokenizer;
|
||||||
|
|
||||||
import org.alfresco.error.AlfrescoRuntimeException;
|
import org.alfresco.error.AlfrescoRuntimeException;
|
||||||
|
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||||
|
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||||
|
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
|
||||||
import org.alfresco.service.cmr.repository.ContentIOException;
|
import org.alfresco.service.cmr.repository.ContentIOException;
|
||||||
import org.alfresco.service.cmr.repository.ContentReader;
|
import org.alfresco.service.cmr.repository.ContentReader;
|
||||||
import org.alfresco.service.cmr.repository.MimetypeService;
|
import org.alfresco.service.cmr.repository.MimetypeService;
|
||||||
|
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
|
||||||
|
import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
|
||||||
import org.alfresco.service.namespace.InvalidQNameException;
|
import org.alfresco.service.namespace.InvalidQNameException;
|
||||||
import org.alfresco.service.namespace.QName;
|
import org.alfresco.service.namespace.QName;
|
||||||
|
import org.alfresco.util.ISO8601DateFormat;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
@ -88,15 +100,19 @@ import org.apache.commons.logging.LogFactory;
|
|||||||
abstract public class AbstractMappingMetadataExtracter implements MetadataExtracter
|
abstract public class AbstractMappingMetadataExtracter implements MetadataExtracter
|
||||||
{
|
{
|
||||||
public static final String NAMESPACE_PROPERTY_PREFIX = "namespace.prefix.";
|
public static final String NAMESPACE_PROPERTY_PREFIX = "namespace.prefix.";
|
||||||
|
private static final String ERR_TYPE_CONVERSION = "metadata.extraction.err.type_conversion";
|
||||||
|
|
||||||
protected static Log logger = LogFactory.getLog(AbstractMappingMetadataExtracter.class);
|
protected static Log logger = LogFactory.getLog(AbstractMappingMetadataExtracter.class);
|
||||||
|
|
||||||
private MetadataExtracterRegistry registry;
|
private MetadataExtracterRegistry registry;
|
||||||
private MimetypeService mimetypeService;
|
private MimetypeService mimetypeService;
|
||||||
|
private DictionaryService dictionaryService;
|
||||||
private boolean initialized;
|
private boolean initialized;
|
||||||
|
|
||||||
private Set<String> supportedMimetypes;
|
private Set<String> supportedMimetypes;
|
||||||
private OverwritePolicy overwritePolicy;
|
private OverwritePolicy overwritePolicy;
|
||||||
|
private boolean failOnTypeConversion;
|
||||||
|
private Set<DateFormat> supportedDateFormats;
|
||||||
private Map<String, Set<QName>> mapping;
|
private Map<String, Set<QName>> mapping;
|
||||||
private boolean inheritDefaultMapping;
|
private boolean inheritDefaultMapping;
|
||||||
|
|
||||||
@ -124,6 +140,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
this.supportedMimetypes = supportedMimetypes;
|
this.supportedMimetypes = supportedMimetypes;
|
||||||
// Set defaults
|
// Set defaults
|
||||||
overwritePolicy = OverwritePolicy.PRAGMATIC;
|
overwritePolicy = OverwritePolicy.PRAGMATIC;
|
||||||
|
failOnTypeConversion = true;
|
||||||
|
supportedDateFormats = new HashSet<DateFormat>(0);
|
||||||
mapping = null; // The default will be fetched
|
mapping = null; // The default will be fetched
|
||||||
inheritDefaultMapping = false; // Any overrides are complete
|
inheritDefaultMapping = false; // Any overrides are complete
|
||||||
initialized = false;
|
initialized = false;
|
||||||
@ -156,7 +174,15 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
{
|
{
|
||||||
return mimetypeService;
|
return mimetypeService;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param dictionaryService the dictionary service to determine which data conversions are necessary
|
||||||
|
*/
|
||||||
|
public void setDictionaryService(DictionaryService dictionaryService)
|
||||||
|
{
|
||||||
|
this.dictionaryService = dictionaryService;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set the mimetypes that are supported by the extracter.
|
* Set the mimetypes that are supported by the extracter.
|
||||||
*
|
*
|
||||||
@ -212,6 +238,46 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
this.overwritePolicy = OverwritePolicy.valueOf(overwritePolicyStr);
|
this.overwritePolicy = OverwritePolicy.valueOf(overwritePolicyStr);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set whether the extractor should discard metadata that fails to convert to the target type
|
||||||
|
* defined in the data dictionary model. This is <tt>true</tt> by default i.e. if the data
|
||||||
|
* extracted is not compatible with the target model then the extraction will fail. If this is
|
||||||
|
* <tt>false<tt> then any extracted data that fails to convert will be discarded.
|
||||||
|
*
|
||||||
|
* @param failOnTypeConversion <tt>false</tt> to discard properties that can't get converted
|
||||||
|
* to the dictionary-defined type, or <tt>true</tt> (default)
|
||||||
|
* to fail the extraction if the type doesn't convert
|
||||||
|
*/
|
||||||
|
public void setFailOnTypeConversion(boolean failOnTypeConversion)
|
||||||
|
{
|
||||||
|
this.failOnTypeConversion = failOnTypeConversion;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the date formats, over and above the {@link ISO8601DateFormat ISO8601 format}, that will
|
||||||
|
* be supported for string to date conversions. The supported syntax is described by the
|
||||||
|
* {@link http://java.sun.com/j2se/1.5.0/docs/api/java/text/SimpleDateFormat.html SimpleDateFormat Javadocs}.
|
||||||
|
*
|
||||||
|
* @param supportedDateFormats a list of supported date formats.
|
||||||
|
*/
|
||||||
|
public void setSupportedDateFormats(List<String> supportedDateFormats)
|
||||||
|
{
|
||||||
|
this.supportedDateFormats = new HashSet<DateFormat>(5);
|
||||||
|
for (String dateFormatStr : supportedDateFormats)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
DateFormat df = new SimpleDateFormat(dateFormatStr);
|
||||||
|
this.supportedDateFormats.add(df);
|
||||||
|
}
|
||||||
|
catch (Throwable e)
|
||||||
|
{
|
||||||
|
// No good
|
||||||
|
throw new AlfrescoRuntimeException("Unable to set supported date format: " + dateFormatStr, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set if the property mappings augment or override the mapping generically provided by the
|
* Set if the property mappings augment or override the mapping generically provided by the
|
||||||
* extracter implementation. The default is <tt>false</tt>, i.e. any mapping set completely
|
* extracter implementation. The default is <tt>false</tt>, i.e. any mapping set completely
|
||||||
@ -347,6 +413,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
*
|
*
|
||||||
* @see #setMappingProperties(Properties)
|
* @see #setMappingProperties(Properties)
|
||||||
*/
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
protected Map<String, Set<QName>> readMappingProperties(Properties mappingProperties)
|
protected Map<String, Set<QName>> readMappingProperties(Properties mappingProperties)
|
||||||
{
|
{
|
||||||
Map<String, String> namespacesByPrefix = new HashMap<String, String>(5);
|
Map<String, String> namespacesByPrefix = new HashMap<String, String>(5);
|
||||||
@ -562,6 +629,8 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
Map<String, Serializable> rawMetadata = extractRaw(reader);
|
Map<String, Serializable> rawMetadata = extractRaw(reader);
|
||||||
// Convert to system properties (standalone)
|
// Convert to system properties (standalone)
|
||||||
Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
|
Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
|
||||||
|
// Convert the properties according to the dictionary types
|
||||||
|
systemProperties = convertSystemPropertyValues(systemProperties);
|
||||||
// Now use the proper overwrite policy
|
// Now use the proper overwrite policy
|
||||||
changedProperties = overwritePolicy.applyProperties(systemProperties, destination);
|
changedProperties = overwritePolicy.applyProperties(systemProperties, destination);
|
||||||
}
|
}
|
||||||
@ -628,6 +697,131 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
return systemProperties;
|
return systemProperties;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts all values according to their dictionary-defined type. This uses the
|
||||||
|
* {@link #setFailOnTypeConversion(boolean) failOnTypeConversion flag} to determine how failures
|
||||||
|
* are handled i.e. if values fail to convert, the process may discard the property.
|
||||||
|
*
|
||||||
|
* @param systemProperties the values keyed to system property names
|
||||||
|
* @return Returns a modified map of properties that have been converted.
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
private Map<QName, Serializable> convertSystemPropertyValues(Map<QName, Serializable> systemProperties)
|
||||||
|
{
|
||||||
|
Map<QName, Serializable> convertedProperties = new HashMap<QName, Serializable>(systemProperties.size() + 7);
|
||||||
|
for (Map.Entry<QName, Serializable> entry : systemProperties.entrySet())
|
||||||
|
{
|
||||||
|
QName propertyQName = entry.getKey();
|
||||||
|
Serializable propertyValue = entry.getValue();
|
||||||
|
// Get the property definition
|
||||||
|
PropertyDefinition propertyDef = (dictionaryService == null) ? null : dictionaryService.getProperty(propertyQName);
|
||||||
|
if (propertyDef == null)
|
||||||
|
{
|
||||||
|
// There is nothing in the DD about this so just transfer it
|
||||||
|
convertedProperties.put(propertyQName, propertyValue);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// It is in the DD, so attempt the conversion
|
||||||
|
DataTypeDefinition propertyTypeDef = propertyDef.getDataType();
|
||||||
|
Serializable convertedPropertyValue = null;
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// Attempt to make any date conversions
|
||||||
|
if (propertyTypeDef.getName().equals(DataTypeDefinition.DATE) || propertyTypeDef.getName().equals(DataTypeDefinition.DATETIME))
|
||||||
|
{
|
||||||
|
if (propertyValue instanceof Collection)
|
||||||
|
{
|
||||||
|
convertedPropertyValue = (Serializable) makeDates((Collection) propertyValue);
|
||||||
|
}
|
||||||
|
else if (propertyValue instanceof String)
|
||||||
|
{
|
||||||
|
convertedPropertyValue = makeDate((String) propertyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (propertyValue instanceof Collection)
|
||||||
|
{
|
||||||
|
convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
|
||||||
|
propertyTypeDef,
|
||||||
|
(Collection) propertyValue);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
convertedPropertyValue = (Serializable) DefaultTypeConverter.INSTANCE.convert(
|
||||||
|
propertyTypeDef,
|
||||||
|
propertyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
convertedProperties.put(propertyQName, convertedPropertyValue);
|
||||||
|
}
|
||||||
|
catch (TypeConversionException e)
|
||||||
|
{
|
||||||
|
// Do we just absorb this or is it a problem?
|
||||||
|
if (failOnTypeConversion)
|
||||||
|
{
|
||||||
|
throw AlfrescoRuntimeException.create(
|
||||||
|
e,
|
||||||
|
ERR_TYPE_CONVERSION,
|
||||||
|
this,
|
||||||
|
propertyQName,
|
||||||
|
propertyTypeDef.getName(),
|
||||||
|
propertyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Done
|
||||||
|
return convertedProperties;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert a collection of date <tt>String</tt> to <tt>Date</tt> objects
|
||||||
|
*/
|
||||||
|
private Collection<Date> makeDates(Collection<String> dateStrs)
|
||||||
|
{
|
||||||
|
List<Date> dates = new ArrayList<Date>(dateStrs.size());
|
||||||
|
for (String dateStr : dateStrs)
|
||||||
|
{
|
||||||
|
Date date = makeDate(dateStr);
|
||||||
|
dates.add(date);
|
||||||
|
}
|
||||||
|
return dates;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert a date <tt>String</tt> to a <tt>Date</tt> object
|
||||||
|
*/
|
||||||
|
private Date makeDate(String dateStr)
|
||||||
|
{
|
||||||
|
Date date = null;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
date = DefaultTypeConverter.INSTANCE.convert(Date.class, dateStr);
|
||||||
|
}
|
||||||
|
catch (TypeConversionException e)
|
||||||
|
{
|
||||||
|
// Try one of the other formats
|
||||||
|
for (DateFormat df : this.supportedDateFormats)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
date = df.parse(dateStr);
|
||||||
|
}
|
||||||
|
catch (ParseException ee)
|
||||||
|
{
|
||||||
|
// Didn't work
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (date == null)
|
||||||
|
{
|
||||||
|
// Still no luck
|
||||||
|
throw new TypeConversionException("Unable to convert string to date: " + dateStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds a value to the map if it is non-trivial. A value is trivial if
|
* Adds a value to the map if it is non-trivial. A value is trivial if
|
||||||
* <ul>
|
* <ul>
|
||||||
@ -646,6 +840,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
|
|||||||
* @param destination the map to put values into
|
* @param destination the map to put values into
|
||||||
* @return Returns <tt>true</tt> if set, otherwise <tt>false</tt>
|
* @return Returns <tt>true</tt> if set, otherwise <tt>false</tt>
|
||||||
*/
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
protected boolean putRawValue(String key, Serializable value, Map<String, Serializable> destination)
|
protected boolean putRawValue(String key, Serializable value, Map<String, Serializable> destination)
|
||||||
{
|
{
|
||||||
if (value == null)
|
if (value == null)
|
||||||
|
@ -36,7 +36,9 @@ import org.alfresco.model.ContentModel;
|
|||||||
import org.alfresco.repo.content.MimetypeMap;
|
import org.alfresco.repo.content.MimetypeMap;
|
||||||
import org.alfresco.repo.content.filestore.FileContentReader;
|
import org.alfresco.repo.content.filestore.FileContentReader;
|
||||||
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
|
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
|
||||||
|
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||||
import org.alfresco.service.cmr.repository.ContentReader;
|
import org.alfresco.service.cmr.repository.ContentReader;
|
||||||
|
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
|
||||||
import org.alfresco.service.namespace.QName;
|
import org.alfresco.service.namespace.QName;
|
||||||
import org.alfresco.util.ApplicationContextHelper;
|
import org.alfresco.util.ApplicationContextHelper;
|
||||||
import org.alfresco.util.TempFileProvider;
|
import org.alfresco.util.TempFileProvider;
|
||||||
@ -57,6 +59,7 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
|
|||||||
protected static final String QUICK_CREATOR = "Nevin Nollop";
|
protected static final String QUICK_CREATOR = "Nevin Nollop";
|
||||||
|
|
||||||
protected MimetypeMap mimetypeMap;
|
protected MimetypeMap mimetypeMap;
|
||||||
|
protected DictionaryService dictionaryService;
|
||||||
|
|
||||||
protected abstract MetadataExtracter getExtracter();
|
protected abstract MetadataExtracter getExtracter();
|
||||||
|
|
||||||
@ -67,12 +70,13 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
|
|||||||
public void setUp() throws Exception
|
public void setUp() throws Exception
|
||||||
{
|
{
|
||||||
this.mimetypeMap = (MimetypeMap) ctx.getBean("mimetypeService");
|
this.mimetypeMap = (MimetypeMap) ctx.getBean("mimetypeService");
|
||||||
|
this.dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
|
||||||
|
|
||||||
// perform a little cleaning up
|
// perform a little cleaning up
|
||||||
long now = System.currentTimeMillis();
|
long now = System.currentTimeMillis();
|
||||||
TempFileProvider.TempFileCleanerJob.removeFiles(now);
|
TempFileProvider.TempFileCleanerJob.removeFiles(now);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check that all objects are present
|
* Check that all objects are present
|
||||||
*/
|
*/
|
||||||
@ -123,9 +127,11 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
|
|||||||
{
|
{
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
|
"Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
|
||||||
QUICK_TITLE, properties.get(ContentModel.PROP_TITLE));
|
QUICK_TITLE,
|
||||||
|
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_TITLE)));
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"Property " + ContentModel.PROP_DESCRIPTION + " not found for mimetype " + mimetype,
|
"Property " + ContentModel.PROP_DESCRIPTION + " not found for mimetype " + mimetype,
|
||||||
QUICK_DESCRIPTION, properties.get(ContentModel.PROP_DESCRIPTION));
|
QUICK_DESCRIPTION,
|
||||||
|
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -38,6 +38,7 @@ public class HtmlMetadataExtracterTest extends AbstractMetadataExtracterTest
|
|||||||
{
|
{
|
||||||
super.setUp();
|
super.setUp();
|
||||||
extracter = new HtmlMetadataExtracter();
|
extracter = new HtmlMetadataExtracter();
|
||||||
|
extracter.setDictionaryService(dictionaryService);
|
||||||
extracter.register();
|
extracter.register();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -15,6 +15,7 @@ public class OfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
|
|||||||
{
|
{
|
||||||
super.setUp();
|
super.setUp();
|
||||||
extracter = new OfficeMetadataExtracter();
|
extracter = new OfficeMetadataExtracter();
|
||||||
|
extracter.setDictionaryService(dictionaryService);
|
||||||
extracter.register();
|
extracter.register();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -15,6 +15,7 @@ public class OpenDocumentMetadataExtracterTest extends AbstractMetadataExtracter
|
|||||||
{
|
{
|
||||||
super.setUp();
|
super.setUp();
|
||||||
extracter = new OpenDocumentMetadataExtracter();
|
extracter = new OpenDocumentMetadataExtracter();
|
||||||
|
extracter.setDictionaryService(dictionaryService);
|
||||||
extracter.register();
|
extracter.register();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -23,8 +23,6 @@
|
|||||||
package org.alfresco.repo.content.metadata;
|
package org.alfresco.repo.content.metadata;
|
||||||
|
|
||||||
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
|
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
|
||||||
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @author Jesper Steen Møller
|
* @author Jesper Steen Møller
|
||||||
@ -42,6 +40,7 @@ public class OpenOfficeMetadataExtracterTest extends AbstractMetadataExtracterTe
|
|||||||
|
|
||||||
extracter = new OpenOfficeMetadataExtracter();
|
extracter = new OpenOfficeMetadataExtracter();
|
||||||
extracter.setMimetypeService(mimetypeMap);
|
extracter.setMimetypeService(mimetypeMap);
|
||||||
|
extracter.setDictionaryService(dictionaryService);
|
||||||
extracter.setConnection(connection);
|
extracter.setConnection(connection);
|
||||||
extracter.init();
|
extracter.init();
|
||||||
}
|
}
|
||||||
|
@@ -16,6 +16,7 @@ public class PdfBoxMetadataExtracterTest extends AbstractMetadataExtracterTest
     {
         super.setUp();
         extracter = new PdfBoxMetadataExtracter();
+        extracter.setDictionaryService(dictionaryService);
         extracter.register();
     }
 
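The test fixtures above all pick up the same new wiring: the extracter is handed a DictionaryService before register(). A standalone sketch of that setup, mirroring the setUp() changes shown here (the helper class and method are invented):

// Sketch only: wire an extracter the way the updated tests do.
import org.alfresco.repo.content.metadata.PdfBoxMetadataExtracter;
import org.alfresco.service.cmr.dictionary.DictionaryService;

public class ExtracterWiringSketch
{
    // Mirrors the setUp() changes above: inject the dictionary, then register.
    public static PdfBoxMetadataExtracter wire(DictionaryService dictionaryService)
    {
        PdfBoxMetadataExtracter extracter = new PdfBoxMetadataExtracter();
        extracter.setDictionaryService(dictionaryService);
        extracter.register();
        return extracter;
    }
}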
@@ -70,6 +70,8 @@ import org.alfresco.service.cmr.security.AuthenticationService;
 import org.alfresco.service.namespace.QName;
 import org.alfresco.util.NameMatcher;
 import org.alfresco.util.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.springframework.remoting.rmi.RmiProxyFactoryBean;
 
 /**
@@ -78,6 +80,8 @@ import org.springframework.remoting.rmi.RmiProxyFactoryBean;
  */
 public class DeploymentServiceImpl implements DeploymentService
 {
+    private static Log fgLogger = LogFactory.getLog(DeploymentServiceImpl.class);
+
     /**
      * Class to hold Deployment destination information.
      * Used as a lock to serialize deployments to the same
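The new field enables the logging idiom that the remaining DeploymentServiceImpl hunks apply throughout: a static commons-logging Log, with each debug call guarded by isDebugEnabled() so the message string is only built when debug output is switched on. A standalone illustration (not the service class itself):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class GuardedDebugSketch
{
    private static Log fgLogger = LogFactory.getLog(GuardedDebugSketch.class);

    public void deploy(String hostName, String target)
    {
        if (fgLogger.isDebugEnabled())
        {
            // The concatenation only happens when debug logging is enabled.
            fgLogger.debug("Deploying to " + hostName + " target " + target);
        }
        // ... deployment work ...
    }
}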
@@ -122,6 +126,11 @@ public class DeploymentServiceImpl implements DeploymentService
         {
             return fHost.hashCode() + fPort;
         }
+
+        public String toString()
+        {
+            return fHost;
+        }
     };
 
     /**
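The added toString() mainly serves the new debug messages: without it, the "Deploying to Remote Alfresco at " + dest concatenation would print the default Object representation of the lock object rather than its host. A trimmed-down illustration of that effect (only a stand-in for the relevant field is shown):

public class DeploymentDestinationSketch
{
    private final String fHost;   // hypothetical stand-in for the real inner-class field

    public DeploymentDestinationSketch(String host)
    {
        this.fHost = host;
    }

    @Override
    public String toString()
    {
        return fHost;   // so the debug concatenation prints the host name
    }
}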
@@ -169,6 +178,10 @@ public class DeploymentServiceImpl implements DeploymentService
         DeploymentDestination dest = getLock(hostName, port);
         synchronized (dest)
         {
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug("Deploying to Remote Alfresco at " + dest);
+            }
             try
             {
                 DeploymentReport report = new DeploymentReport();
@@ -178,6 +191,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
                                                             new Pair<Integer, String>(version, srcPath),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -229,6 +246,10 @@ public class DeploymentServiceImpl implements DeploymentService
                         new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                             new Pair<Integer, String>(version, srcPath),
                                             dstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     report.add(event);
                     if (callbacks != null)
                     {
@@ -248,6 +269,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 event = new DeploymentEvent(DeploymentEvent.Type.END,
                                             new Pair<Integer, String>(version, srcPath),
                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -269,6 +294,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.END,
                                                             new Pair<Integer, String>(version, srcPath),
                                                             dstPath);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 for (DeploymentCallback callback : callbacks)
                 {
                     callback.eventOccurred(event);
@@ -381,6 +410,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                         source,
                                         destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -428,6 +461,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                             source,
                                                             destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -449,6 +486,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                             source,
                                                             destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -482,6 +523,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 String destination = dst.getPath();
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                             source, destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -512,6 +557,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
                                                             source,
                                                             destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -536,6 +585,10 @@ public class DeploymentServiceImpl implements DeploymentService
                 DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.UPDATED,
                                                             source,
                                                             destination);
+                if (fgLogger.isDebugEnabled())
+                {
+                    fgLogger.debug(event);
+                }
                 report.add(event);
                 if (callbacks != null)
                 {
@@ -798,6 +851,10 @@ public class DeploymentServiceImpl implements DeploymentService
                                       boolean dontDelete, boolean dontDo,
                                       List<DeploymentCallback> callbacks)
     {
+        if (fgLogger.isDebugEnabled())
+        {
+            fgLogger.debug("Deploying To FileSystem Reciever on " + hostName + " to target " + target);
+        }
         DeploymentReport report = new DeploymentReport();
         DeploymentReceiverService service = null;
         String ticket = null;
@@ -807,6 +864,10 @@ public class DeploymentServiceImpl implements DeploymentService
             DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
                                                         new Pair<Integer, String>(version, srcPath),
                                                         target);
+            if (fgLogger.isDebugEnabled())
+            {
+                fgLogger.debug(event);
+            }
             if (callbacks != null)
             {
                 for (DeploymentCallback callback : callbacks)
@@ -893,6 +954,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                                                 new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
                                                                 newDstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -915,7 +980,7 @@ public class DeploymentServiceImpl implements DeploymentService
                     src = null;
                     continue;
                 }
-                int diff = src.getName().compareTo(dst.getName());
+                int diff = src.getName().compareToIgnoreCase(dst.getName());
                 if (diff < 0)
                 {
                     if (!excluded(matcher, src.getPath(), null))
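The change from compareTo to compareToIgnoreCase makes this name comparison case-insensitive, so listing entries that differ only in letter case are paired rather than treated as distinct files. A minimal illustration (the file names are invented):

public class CaseCompareSketch
{
    public static void main(String[] args)
    {
        String src = "Index.html";
        String dst = "index.html";
        System.out.println(src.compareTo(dst));           // non-zero: treated as different entries
        System.out.println(src.compareToIgnoreCase(dst)); // 0: matched as the same entry
    }
}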
@@ -973,6 +1038,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.DELETED,
                                                                 new Pair<Integer, String>(version, extendPath(srcPath, dst.getName())),
                                                                 newDstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -1008,6 +1077,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                                 new Pair<Integer, String>(version, src.getPath()),
                                                                 dstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -1049,6 +1122,10 @@ public class DeploymentServiceImpl implements DeploymentService
                     DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.COPIED,
                                                                 new Pair<Integer, String>(version, src.getPath()),
                                                                 dstPath);
+                    if (fgLogger.isDebugEnabled())
+                    {
+                        fgLogger.debug(event);
+                    }
                     if (callbacks != null)
                     {
                         for (DeploymentCallback callback : callbacks)
@@ -480,6 +480,11 @@ public interface NodeService
     /**
      * Get the node with the given name within the context of the parent node. The name
      * is case-insensitive as Alfresco has to support case-insensitive clients as standard.
+     * <p>
+     * That API method getChildByName only works for associations that don't allow duplicate child names.
+     * See <b>cm:folder</b> and the <b>duplicate</b> tag. Child associations without this allow duplicate
+     * child names and therefore it is possible to have multiple children with the same name stored against
+     * the given association type.
      *
      * @param nodeRef the parent node - usuall a <b>container</b>
      * @param assocTypeQName the type of the association
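A hedged usage sketch of the behaviour the expanded Javadoc describes: a case-insensitive, single-child lookup under an association type that does not allow duplicate names, such as cm:contains under a folder (the parent reference and name here are invented):

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class GetChildByNameSketch
{
    public static NodeRef findReadme(NodeService nodeService, NodeRef folder)
    {
        // Case-insensitive: "ReadMe.txt" also matches a child stored as "readme.txt";
        // returns null when no such child exists.
        return nodeService.getChildByName(folder, ContentModel.ASSOC_CONTAINS, "ReadMe.txt");
    }
}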