mirror of https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-06-09 17:45:10 +00:00

Merge from HEAD into WCM-DEV2. Also fixes build breakage in jndi-client and catalina-virtual that I introduced earlier.

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/WCM-DEV2/root@3393 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261

This commit is contained in:
parent c50a4aa669
commit f7d9d83036
@@ -87,6 +87,8 @@
</constructor-arg>
</bean>

<bean id="openOfficeConnection" class="net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection" />

<!-- Metadata Extraction Registry -->
<bean id="metadataExtracterRegistry" class="org.alfresco.repo.content.metadata.MetadataExtracterRegistry" >
<property name="mimetypeMap">
@@ -110,10 +112,15 @@
<!-- Content Metadata Extracters -->
<bean class="org.alfresco.repo.content.metadata.PdfBoxMetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.OfficeMetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.MailMetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.HtmlMetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.MP3MetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.OpenDocumentMetadataExtracter" parent="baseMetadataExtracter" />
<bean class="org.alfresco.repo.content.metadata.UnoMetadataExtracter" parent="baseMetadataExtracter" init-method="init" />
<bean class="org.alfresco.repo.content.metadata.OpenOfficeMetadataExtracter" parent="baseMetadataExtracter" init-method="init" >
<property name="connection">
<ref bean="openOfficeConnection" />
</property>
</bean>

<!-- Content Transformation Registry -->
@@ -180,9 +187,15 @@
parent="baseContentTransformer" />

<bean id="transformer.OpenOffice"
class="org.alfresco.repo.content.transform.UnoContentTransformer"
parent="baseContentTransformer"
init-method="init" />
class="org.alfresco.repo.content.transform.OpenOfficeContentTransformer"
parent="baseContentTransformer" >
<property name="connection">
<ref bean="openOfficeConnection" />
</property>
<property name="documentFormatsConfiguration">
<value>classpath:alfresco/mimetype/openoffice-document-formats.xml</value>
</property>
</bean>

<bean id="transformer.complex.OpenOffice.PdfBox"
class="org.alfresco.repo.content.transform.ComplexContentTransformer"
@@ -217,7 +230,7 @@
<value>imconvert "${source}" ${options} "${target}"</value>
</entry>
<entry key=".*">
<value>imconvert ${source} ${options} ${target}</value>
<value>convert ${source} ${options} ${target}</value>
</entry>
</map>
</property>
@@ -76,6 +76,7 @@
<value>alfresco.messages.template-service</value>
<value>alfresco.messages.lock-service</value>
<value>alfresco.messages.patch-service</value>
<value>alfresco.messages.webdav-messages</value>
</list>
</property>
</bean>
@@ -185,7 +186,7 @@

<!-- Indexer and searchers for lucene -->
<bean id="luceneIndexerAndSearcherFactory"
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory">
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2">
<property name="nodeService">
<ref bean="nodeService" />
</property>
@@ -348,15 +349,12 @@
<property name="policyComponent">
<ref bean="policyComponent" />
</property>
<property name="ruleService">
<ref bean="ruleService"/>
<property name="versionService">
<ref bean="versionService"/>
</property>
<property name="nodeService">
<ref bean="nodeService"/>
</property>
<property name="actionService">
<ref bean="actionService"/>
</property>
</bean>

<!-- -->
@@ -482,6 +480,12 @@
<property name="searchService">
<ref bean="SearchService" />
</property>
<property name="permissionService">
<ref bean="PermissionService" />
</property>
<property name="authenticationService">
<ref bean="AuthenticationService" />
</property>
</bean>

<!-- -->
@@ -562,7 +566,7 @@

<!-- Bean to backup Lucene indexes -->
<bean id="luceneIndexBackupComponent"
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory$LuceneIndexBackupComponent">
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2$LuceneIndexBackupComponent">
<property name="transactionService">
<ref bean="transactionComponent" />
</property>
@@ -49,7 +49,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 25MB memory required -->
<!-- approx 15MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.aspects"
maxElementsInMemory="10000"
@@ -58,7 +58,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 50MB memory required -->
<!-- approx 10MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.properties"
maxElementsInMemory="10000"
@@ -67,7 +67,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 50MB memory required -->
<!-- approx 20MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.childAssocs"
maxElementsInMemory="10000"
@@ -76,7 +76,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 50MB memory required -->
<!-- approx 10MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.parentAssocs"
maxElementsInMemory="10000"
@@ -85,7 +85,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 70MB memory required -->
<!-- approx 250MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.ChildAssocImpl"
maxElementsInMemory="200000"
@@ -94,7 +94,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 20MB memory required -->
<!-- approx 10MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.sourceNodeAssocs"
maxElementsInMemory="10000"
@@ -103,7 +103,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
<!-- approx 20MB memory required -->
<!-- approx 10MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.NodeImpl.targetNodeAssocs"
maxElementsInMemory="10000"
@@ -150,19 +150,19 @@
overflowToDisk="false"
/>
<!-- Permission related caches -->
<!-- approx 10MB memory required -->
<!-- approx 1MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.DbAccessControlListImpl"
maxElementsInMemory="1000"
eternal="true"
overflowToDisk="false"/>
<!-- approx 10MB memory required -->
<!-- approx 1MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.DbAccessControlListImpl.entries"
maxElementsInMemory="1000"
eternal="true"
overflowToDisk="false"/>
<!-- approx 30MB memory required -->
<!-- approx 5MB memory required -->
<cache
name="org.alfresco.repo.domain.hibernate.DbAccessControlEntryImpl"
maxElementsInMemory="5000"
config/alfresco/extension/old-indexer-context.xml.sample (new file, 88 lines)
@@ -0,0 +1,88 @@
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>

<beans>

<!-- Indexer and searchers for lucene -->

<bean id="luceneIndexerAndSearcherFactory"
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory">
<property name="nodeService">
<ref bean="nodeService" />
</property>
<property name="dictionaryService">
<ref bean="dictionaryService" />
</property>
<property name="nameSpaceService">
<ref bean="namespaceService" />
</property>
<property name="luceneIndexLock">
<ref bean="luceneIndexLock" />
</property>
<property name="luceneFullTextSearchIndexer">
<ref bean="LuceneFullTextSearchIndexer" />
</property>
<property name="indexRootLocation">
<value>${dir.indexes}</value>
</property>
<property name="contentService">
<ref bean="contentService" />
</property>
<property name="queryRegister">
<ref bean="queryRegisterComponent" />
</property>
<property name="maxAtomicTransformationTime">
<value>${lucene.maxAtomicTransformationTime}</value>
</property>
<property name="queryMaxClauses">
<value>${lucene.query.maxClauses}</value>
</property>
<property name="indexerBatchSize">
<value>${lucene.indexer.batchSize}</value>
</property>
<property name="indexerMinMergeDocs">
<value>${lucene.indexer.minMergeDocs}</value>
</property>
<property name="indexerMergeFactor">
<value>${lucene.indexer.mergeFactor}</value>
</property>
<property name="indexerMaxMergeDocs">
<value>${lucene.indexer.maxMergeDocs}</value>
</property>
<property name="lockDirectory">
<value>${dir.indexes.lock}</value>
</property>
<property name="indexerMaxFieldLength">
<value>${lucene.indexer.maxFieldLength}</value>
</property>
<property name="writeLockTimeout">
<value>${lucene.write.lock.timeout}</value>
</property>
<property name="commitLockTimeout">
<value>${lucene.commit.lock.timeout}</value>
</property>
<property name="lockPollInterval">
<value>${lucene.lock.poll.interval}</value>
</property>
</bean>

<!-- Bean to backup Lucene indexes -->

<bean id="luceneIndexBackupComponent"
class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory$LuceneIndexBackupComponent">
<property name="transactionService">
<ref bean="transactionComponent" />
</property>
<property name="factory">
<ref bean="luceneIndexerAndSearcherFactory" />
</property>
<property name="nodeService">
<ref bean="nodeService" />
</property>
<property name="targetLocation">
<value>${dir.root}/backup-lucene-indexes</value>
</property>
</bean>

</beans>
@@ -32,6 +32,10 @@
<bean id="sessionFactoryBase" abstract="true">
<property name="mappingResources">
<list>
<!-- -->
<!-- Alfresco Node Storage -->
<!-- -->

<value>org/alfresco/repo/domain/hibernate/Node.hbm.xml</value>
<value>org/alfresco/repo/domain/hibernate/Store.hbm.xml</value>
<value>org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml</value>
@@ -69,6 +69,6 @@ patch.scriptsFolder.result.exists=The scripts folder already exists: {0}
patch.scriptsFolder.result.created=The scripts folder was successfully created: {0}

patch.topLevelGroupParentChildAssociationTypePatch.description=Ensure top level groups have the correct child association type.
patch.topLevelGroupParentChildAssociationTypePatch.=Fixed top level groups child association type.

patch.topLevelGroupParentChildAssociationTypePatch.result=Fixed {0} top level groups child association types.
patch.topLevelGroupParentChildAssociationTypePatch.err.sys_path_not_found=Required authority system path not found: {0}
patch.topLevelGroupParentChildAssociationTypePatch.err.auth_path_not_found=Required authority path not found: {0}
@@ -6,3 +6,5 @@ version_service.err_unsupported=The current implementation of the version service
version_service.err_one_preceeding=The current implementation of the version service only supports one preceding version.
version_service.err_restore_no_version=The node {0} cannot be restored since there is no version information available for this node.
version_service.err_revert_mismatch=The version provided to revert to does not come from the node's version history.
version_service.initial_version=Initial version
version_service.auto_version=Auto version
config/alfresco/messages/webdav-messages.properties (new file, 13 lines)
@@ -0,0 +1,13 @@
# webdav HTML page messages

webdav.repository_title=Alfresco Content Repository
webdav.directory_listing=Directory listing for
webdav.column.name=Name
webdav.column.size=Size
webdav.column.type=Type
webdav.column.modifieddate=Modified Date
webdav.column.navigate_up=Up a level
webdav.err.dir=An error occurred whilst generating the directory listing, please contact the system administrator.
webdav.size.bytes=bytes
webdav.size.kilobytes=Kb
webdav.size.megabytes=Mb
config/alfresco/mimetype/openoffice-document-formats.xml (new file, 154 lines)
@@ -0,0 +1,154 @@
<?xml version="1.0"?>
<document-formats>

<!-- Export-Only Formats (no family attribute) -->

<document-format><name>Portable Document Format</name>
<mime-type>application/pdf</mime-type>
<file-extension>pdf</file-extension>
<export-filters>
<entry><family>Presentation</family><string>impress_pdf_Export</string></entry>
<entry><family>Spreadsheet</family><string>calc_pdf_Export</string></entry>
<entry><family>Text</family><string>writer_pdf_Export</string></entry>
</export-filters>
</document-format>

<document-format><name>Macromedia Flash</name>
<mime-type>application/x-shockwave-flash</mime-type>
<file-extension>swf</file-extension>
<export-filters>
<entry><family>Presentation</family><string>impress_flash_Export</string></entry>
</export-filters>
</document-format>

<!--
- Note: (X)HTML formats are here for completeness but they are currently unsupported because
- 1. additional files may be generated for images and this would require extra care in a servlet environment
- 2. output quality does not seem to be very good in many cases
-->
<document-format><name>HTML</name>
<mime-type>text/html</mime-type>
<file-extension>html</file-extension>
<export-filters>
<entry><family>Presentation</family><string>impress_html_Export</string></entry>
<entry><family>Spreadsheet</family><string>HTML (StarCalc)</string></entry>
<entry><family>Text</family><string>HTML (StarWriter)</string></entry>
</export-filters>
</document-format>

<!-- Text (Word Processor) Formats -->

<document-format><name>OpenDocument Text</name>
<family>Text</family>
<mime-type>application/vnd.oasis.opendocument.text</mime-type>
<file-extension>odt</file-extension>
<export-filters>
<entry><family>Text</family><string>writer8</string></entry>
</export-filters>
</document-format>

<document-format><name>OpenOffice.org 1.0 Text Document</name>
<family>Text</family>
<mime-type>application/vnd.sun.xml.writer</mime-type>
<file-extension>sxw</file-extension>
<export-filters>
<entry><family>Text</family><string>StarOffice XML (Writer)</string></entry>
</export-filters>
</document-format>

<document-format><name>Microsoft Word</name>
<family>Text</family>
<mime-type>application/msword</mime-type>
<file-extension>doc</file-extension>
<export-filters>
<entry><family>Text</family><string>MS Word 97</string></entry>
</export-filters>
</document-format>

<document-format><name>WordPerfect</name>
<family>Text</family>
<mime-type>application/wordperfect</mime-type>
<file-extension>wpd</file-extension>
<export-filters>
<entry><family>Text</family><string>WordPerfect</string></entry>
</export-filters>
</document-format>

<document-format><name>Rich Text Format</name>
<family>Text</family>
<mime-type>text/rtf</mime-type>
<file-extension>rtf</file-extension>
<export-filters>
<entry><family>Text</family><string>Rich Text Format</string></entry>
</export-filters>
</document-format>

<document-format><name>Plain Text</name>
<family>Text</family>
<mime-type>text/plain</mime-type>
<file-extension>txt</file-extension>
<export-filters>
<entry><family>Text</family><string>Text</string></entry>
</export-filters>
</document-format>

<!-- Spreadsheet Formats -->

<document-format><name>OpenDocument Spreadsheet</name>
<family>Spreadsheet</family>
<mime-type>application/vnd.oasis.opendocument.spreadsheet</mime-type>
<file-extension>ods</file-extension>
<export-filters>
<entry><family>Spreadsheet</family><string>calc8</string></entry>
</export-filters>
</document-format>

<document-format><name>OpenOffice.org 1.0 Spreadsheet</name>
<family>Spreadsheet</family>
<mime-type>application/vnd.sun.xml.calc</mime-type>
<file-extension>sxc</file-extension>
<export-filters>
<entry><family>Spreadsheet</family><string>StarOffice XML (Calc)</string></entry>
</export-filters>
</document-format>

<document-format><name>Microsoft Excel</name>
<family>Spreadsheet</family>
<mime-type>application/vnd.excel</mime-type>
<file-extension>xls</file-extension>
<export-filters>
<entry><family>Spreadsheet</family><string>MS Excel 97</string></entry>
</export-filters>
</document-format>

<!-- Presentation Formats -->

<document-format><name>OpenDocument Presentation</name>
<family>Presentation</family>
<mime-type>application/vnd.oasis.opendocument.presentation</mime-type>
<file-extension>odp</file-extension>
<export-filters>
<entry><family>Presentation</family><string>impress8</string></entry>
</export-filters>
</document-format>

<document-format><name>OpenOffice.org 1.0 Presentation</name>
<family>Presentation</family>
<mime-type>application/vnd.sun.xml.impress</mime-type>
<file-extension>sxi</file-extension>
<export-filters>
<entry><family>Presentation</family><string>StarOffice XML (Impress)</string></entry>
</export-filters>
</document-format>

<document-format><name>Microsoft PowerPoint</name>
<family>Presentation</family>
<mime-type>application/vnd.powerpoint</mime-type>
<file-extension>ppt</file-extension>
<export-filters>
<entry><family>Presentation</family><string>MS PowerPoint 97</string></entry>
</export-filters>
</document-format>

</document-formats>
@@ -230,21 +230,25 @@
<title>Created</title>
<type>d:datetime</type>
<protected>true</protected>
<mandatory enforced="true">true</mandatory>
</property>
<property name="cm:creator">
<title>Creator</title>
<type>d:text</type>
<protected>true</protected>
<mandatory enforced="true">true</mandatory>
</property>
<property name="cm:modified">
<title>Modified</title>
<type>d:datetime</type>
<protected>true</protected>
<mandatory enforced="true">true</mandatory>
</property>
<property name="cm:modifier">
<title>Modifier</title>
<type>d:text</type>
<protected>true</protected>
<mandatory enforced="true">true</mandatory>
</property>
<property name="cm:accessed">
<title>Accessed</title>
@@ -645,6 +649,29 @@
</associations>
</aspect>

<aspect name="cm:emailed">
<title>Emailed</title>
<properties>
<property name="cm:originator">
<title>Originator</title>
<type>d:text</type>
</property>
<property name="cm:addressee">
<title>Addressee</title>
<type>d:text</type>
</property>
<property name="cm:addressees">
<title>Addressees</title>
<type>d:text</type>
<multiple>true</multiple>
</property>
<property name="cm:sentdate">
<title>Sent Date</title>
<type>d:datetime</type>
</property>
</properties>
</aspect>

</aspects>

</model>
config/alfresco/workflow-context.xml (new file, 19 lines)
@@ -0,0 +1,19 @@
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>

<beans>

<!-- jBPM configuration -->
<bean id="jbpm.configuration" class="org.springmodules.workflow.jbpm31.LocalJbpmConfigurationFactoryBean">
<property name="sessionFactory" ref="sessionFactory"/>
<property name="configuration" value="classpath:org/jbpm/default.jbpm.cfg.xml"/>
</bean>

<!-- jBPM template -->
<bean id="jbpm.template" class="org.springmodules.workflow.jbpm31.JbpmTemplate">
<constructor-arg index="0" ref="jbpm.configuration"/>
</bean>

</beans>
@@ -19,6 +19,19 @@ package org.alfresco.filesys.ftp;
import java.net.*;
import java.io.*;

import org.alfresco.filesys.server.SrvSession;
import org.alfresco.filesys.server.filesys.AccessMode;
import org.alfresco.filesys.server.filesys.DiskDeviceContext;
import org.alfresco.filesys.server.filesys.DiskInterface;
import org.alfresco.filesys.server.filesys.FileAction;
import org.alfresco.filesys.server.filesys.FileOpenParams;
import org.alfresco.filesys.server.filesys.FileStatus;
import org.alfresco.filesys.server.filesys.NetworkFile;
import org.alfresco.filesys.server.filesys.NotifyChange;
import org.alfresco.filesys.server.filesys.TreeConnection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
* FTP Data Session Class
* <p>
@@ -30,9 +43,16 @@ import java.io.*;
*
* @author GKSpencer
*/
public class FTPDataSession implements Runnable
public class FTPDataSession extends SrvSession implements Runnable
{
// Debug logging

private static final Log logger = LogFactory.getLog("org.alfresco.ftp.protocol");

// Data session command types

public enum DataCommand { StoreFile, ReturnFile };

// FTP session that this data connection is associated with

private FTPSrvSession m_cmdSess;
@@ -54,10 +74,6 @@ public class FTPDataSession implements Runnable

private ServerSocket m_passiveSock;

// Adapter to bind the passive socket to

private InetAddress m_bindAddr;

// Transfer in progress and abort file transfer flags

private boolean m_transfer;
@@ -66,7 +82,27 @@ public class FTPDataSession implements Runnable
// Send/receive data byte count

private long m_bytCount;

// Data command type

private DataCommand m_dataCmd;

// Requested file name

private String m_reqFileName;

// Path to the local file

private FTPPath m_ftpPath;

// Restart position

private long m_restartPos;

// Thread that runs the data command

private Thread m_dataThread;

/**
* Class constructor
* <p>
@@ -77,7 +113,10 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess) throws IOException
{
// Setup the base class

super( -1, sess.getServer(), "FTPDATA", null);

// Set the associated command session

m_cmdSess = sess;
@@ -100,6 +139,9 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess, int localPort, InetAddress bindAddr) throws IOException
{
// Setup the base class

super( -1, sess.getServer(), "FTPDATA", null);

// Set the associated command session

@@ -124,6 +166,9 @@
*/
protected FTPDataSession(FTPSrvSession sess, InetAddress bindAddr) throws IOException
{
// Setup the base class

super( -1, sess.getServer(), "FTPDATA", null);

// Set the associated command session

@@ -146,6 +191,9 @@
*/
protected FTPDataSession(FTPSrvSession sess, InetAddress addr, int port)
{
// Setup the base class

super( -1, sess.getServer(), "FTPDATA", null);

// Set the associated command session

@@ -171,6 +219,9 @@
*/
protected FTPDataSession(FTPSrvSession sess, int localPort, InetAddress addr, int port)
{
// Setup the base class

super( -1, sess.getServer(), "FTPDATA", null);

// Set the associated command session

@@ -271,6 +322,16 @@
return m_transfer;
}

/**
* Determine if the transfer has been aborted
*
* @return boolean
*/
public final boolean isTransferAborted()
{
return m_abort;
}

/**
* Abort an in-progress file transfer
*/
@@ -358,12 +419,524 @@
}
m_passiveSock = null;
}

// Commit, or rollback, any active user transaction

try
{
// Commit or rollback the transaction

endTransaction();
}
catch ( Exception ex)
{
// Debug

if ( logger.isDebugEnabled())
logger.debug("Error committing transaction", ex);
}
}

/**
* Store a file using a separate thread to receive the data and write the file
*
* @param ftpPath FTPPath
*/
public final void doStoreFile( FTPPath ftpPath, long restartPos, String reqFileName)
{
// Set the transfer details

m_dataCmd = DataCommand.StoreFile;
m_ftpPath = ftpPath;
m_restartPos = restartPos;
m_reqFileName = reqFileName;

// Run the transfer in a separate thread

m_dataThread = new Thread(this);
m_dataThread.setName(m_cmdSess.getUniqueId() + "_DATA_STORE");
m_dataThread.start();
}

/**
* Return a file using a separate thread to read the file and send the data
*
* @param ftpPath FTPPath
*/
public final void doReturnFile( FTPPath ftpPath, long restartPos, String reqFileName)
{
// Set the transfer details

m_dataCmd = DataCommand.ReturnFile;
m_ftpPath = ftpPath;
m_restartPos = restartPos;
m_reqFileName = reqFileName;

// Run the transfer in a separate thread

m_dataThread = new Thread(this);
m_dataThread.setName(m_cmdSess.getUniqueId() + "_DATA_RETURN");
m_dataThread.start();
}

/**
* Run a file send/receive in a separate thread
*/
public void run()
{
// Setup the authentication context as we are running in a separate thread from the main FTP session

try
{
// Setup the authentication context for the thread

m_cmdSess.authenticateDataSession();

// Run the required data command

switch ( m_dataCmd)
{
// Store a file

case StoreFile:
runStoreFile();
break;

// Return a file

case ReturnFile:
runReturnFile();
break;
}
}
catch ( org.alfresco.repo.security.authentication.AuthenticationException ex)
{
if ( logger.isErrorEnabled())
logger.error("Failed to authenticate FTP data session", ex);

// Close the data connection to the client

m_cmdSess.getFTPServer().releaseDataSession(this);
closeSession();
}
}

/**
* Return a file to the client
*/
private final void runReturnFile()
{
// Send the file to the client

OutputStream os = null;
DiskInterface disk = null;
TreeConnection tree = null;
NetworkFile netFile = null;
Socket dataSock = null;

try
{
// Open an output stream to the client

dataSock = getSocket();
os = dataSock.getOutputStream();

// Create a temporary tree connection

tree = m_cmdSess.getTreeConnection(m_ftpPath.getSharedDevice());

// Check if the file exists and it is a file, if so then open the
// file

disk = (DiskInterface) m_ftpPath.getSharedDevice().getInterface();

// Create the file open parameters

FileOpenParams params = new FileOpenParams(m_ftpPath.getSharePath(), FileAction.OpenIfExists,
AccessMode.ReadOnly, 0);

// Check if the file exists and it is a file

int sts = disk.fileExists( this, tree, m_ftpPath.getSharePath());

if (sts == FileStatus.FileExists)
{
// Open the file

netFile = disk.openFile( this, tree, params);
}

// Check if the file has been opened

if (netFile == null)
{
m_cmdSess.sendFTPResponse(550, "File " + m_reqFileName + " not available");
return;
}

// Allocate the buffer for the file data

byte[] buf = new byte[FTPSrvSession.DEFAULT_BUFFERSIZE];
long filePos = m_restartPos;

int len = -1;

while (filePos < netFile.getFileSize())
{
// Read another block of data from the file

len = disk.readFile( this, tree, netFile, buf, 0, buf.length, filePos);

// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
logger.debug(" Write len=" + len + " bytes");

// Write the current data block to the client, update the file position

if (len > 0)
{
// Write the data to the client

os.write(buf, 0, len);

// Update the file position

filePos += len;

// Update the transfer byte count

m_bytCount += len;
}

// Check if the transfer has been aborted

if ( isTransferAborted())
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILE))
logger.debug(" Transfer aborted (RETR)");

// Send a status to the client

sendFTPResponse( 226, "Aborted data connection");

// Finally block will cleanup

return;
}
}

// Close the output stream to the client

os.close();
os = null;

// Indicate that the file has been transmitted

sendFTPResponse(226, "Closing data connection");

// Close the data session

m_cmdSess.getFTPServer().releaseDataSession(this);

// Close the network file

disk.closeFile( this, tree, netFile);
netFile = null;

// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
logger.debug(" Transfer complete, file closed");
}
catch (SocketException ex)
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
logger.debug(" Error during transfer", ex);

// Indicate that there was an error during transmission of the file
// data

sendFTPResponse(426, "Data connection closed by client");
}
catch (Exception ex)
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
logger.debug(" Error during transfer", ex);

// Indicate that there was an error during transmission of the file
// data

sendFTPResponse(426, "Error during transmission");
}
finally
{
try
{
// Close the network file

if (netFile != null && disk != null && tree != null)
disk.closeFile(m_cmdSess, tree, netFile);

// Close the output stream to the client

if (os != null)
os.close();

// Close the data connection to the client

m_cmdSess.getFTPServer().releaseDataSession( this);
closeSession();
}
catch (Exception ex)
{
if ( logger.isErrorEnabled())
logger.error( "Error during FTP data session close", ex);
}
}
}

/**
* Store a file received from the client
*/
private final void runStoreFile()
{
// Store the file from the client

InputStream is = null;
DiskInterface disk = null;
TreeConnection tree = null;
NetworkFile netFile = null;
Socket dataSock = null;

try
{
// Create a temporary tree connection

tree = m_cmdSess.getTreeConnection(m_ftpPath.getSharedDevice());

// Check if the session has the required access to the filesystem

if (tree == null || tree.hasWriteAccess() == false)
{
// Session does not have write access to the filesystem

sendFTPResponse(550, "Access denied");
return;
}

// Check if the file exists

disk = (DiskInterface) m_ftpPath.getSharedDevice().getInterface();
int sts = disk.fileExists(this, tree, m_ftpPath.getSharePath());

if (sts == FileStatus.DirectoryExists)
{
// Return an error status

sendFTPResponse(500, "Invalid path (existing directory)");
return;
}

// Create the file open parameters

FileOpenParams params = new FileOpenParams(m_ftpPath.getSharePath(),
sts == FileStatus.FileExists ? FileAction.TruncateExisting : FileAction.CreateNotExist,
AccessMode.ReadWrite, 0);

// Create a new file to receive the data

if (sts == FileStatus.FileExists)
{
// Overwrite the existing file

netFile = disk.openFile(this, tree, params);
}
else
{
// Create a new file

netFile = disk.createFile(this, tree, params);
}

// Notify change listeners that a new file has been created

DiskDeviceContext diskCtx = (DiskDeviceContext) tree.getContext();

if (diskCtx.hasChangeHandler())
diskCtx.getChangeHandler().notifyFileChanged(NotifyChange.ActionAdded, m_ftpPath.getSharePath());

// Send the intermediate response

sendFTPResponse(150, "File status okay, about to open data connection");

// Get the data connection socket

try
{
dataSock = getSocket();
}
catch (Exception ex)
{
}

if (dataSock == null)
{
sendFTPResponse(426, "Connection closed; transfer aborted");
return;
}

// Open an input stream from the client

is = dataSock.getInputStream();

// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILE))
logger.debug("Storing ftp="
+ m_ftpPath.getFTPPath() + ", share=" + m_ftpPath.getShareName() + ", path="
+ m_ftpPath.getSharePath());

// Allocate the buffer for the file data

byte[] buf = new byte[FTPSrvSession.DEFAULT_BUFFERSIZE];
long filePos = 0;
int len = is.read(buf, 0, buf.length);

while (len > 0)
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
logger.debug(" Receive len=" + len + " bytes");

// Write the current data block to the file, update the file
// position

disk.writeFile(this, tree, netFile, buf, 0, len, filePos);
filePos += len;

// Read another block of data from the client

len = is.read(buf, 0, buf.length);
}

// Close the input stream from the client

is.close();
is = null;

// Close the network file

disk.closeFile(this, tree, netFile);
netFile = null;

// Commit the transaction now before notifying the client that the transfer is finished

endTransaction();

// Indicate that the file has been received

sendFTPResponse(226, "Closing data connection");

// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
logger.debug(" Transfer complete, file closed");
}
catch (SocketException ex)
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
logger.debug(" Error during transfer", ex);

// Indicate that there was an error during transmission of the file data

sendFTPResponse(426, "Data connection closed by client");
}
catch (Exception ex)
{
// DEBUG

if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
logger.debug(" Error during transfer", ex);

// Indicate that there was an error during transmission of the file
// data

sendFTPResponse(426, "Error during transmission");
}
finally
{
try
{
// Close the network file

if (netFile != null && disk != null && tree != null)
disk.closeFile( this, tree, netFile);

// Close the input stream to the client

if (is != null)
is.close();

// Close the data connection to the client

m_cmdSess.getFTPServer().releaseDataSession(this);
closeSession();
}
catch (Exception ex)
{
if ( logger.isErrorEnabled())
logger.error( "Error during FTP data session close", ex);
}
}
}

/**
* Send an FTP response to the client via the command session
*
* @param stsCode int
* @param msg String
*/
protected final void sendFTPResponse(int stsCode, String msg)
{
try
{
m_cmdSess.sendFTPResponse( stsCode, msg);
}
catch (Exception ex)
{
}
}

/**
* Return the client address
*
* @return InetAddress
*/
public InetAddress getRemoteAddress() {
return m_cmdSess.getRemoteAddress();
}
}
@@ -98,10 +98,10 @@ public class FTPDate
buf.append(hr);
buf.append(":");

int sec = cal.get(Calendar.SECOND);
if (sec < 10)
int min = cal.get(Calendar.MINUTE);
if (min < 10)
buf.append("0");
buf.append(sec);
buf.append(min);
}
}
}
(File diff suppressed because it is too large)
@@ -460,8 +460,6 @@ public class ServerConfiguration implements ApplicationListener
throw new AlfrescoRuntimeException("Property 'configService' not set");
}

initialised = false;

// Create the configuration context

ConfigLookupContext configCtx = new ConfigLookupContext(ConfigArea);
@@ -470,59 +468,106 @@

determinePlatformType();

// Initialize the filesystems

boolean filesysInitOK = false;
Config config = null;

try
{
// Process the CIFS server configuration

Config config = configService.getConfig(ConfigCIFS, configCtx);
processCIFSServerConfig(config);

// Process the FTP server configuration

config = configService.getConfig(ConfigFTP, configCtx);
processFTPServerConfig(config);

// Process the security configuration

config = configService.getConfig(ConfigSecurity, configCtx);
processSecurityConfig(config);

// Process the filesystems configuration

config = configService.getConfig(ConfigFilesystems, configCtx);
processFilesystemsConfig(config);

// Successful initialisation
initialised = true;

// Indicate that the filesystems were initialized

filesysInitOK = true;
}
catch (UnsatisfiedLinkError ex)
{
// Error accessing the Win32NetBIOS DLL code

logger.error("Error accessing Win32 NetBIOS, check DLL is on the path");

// Disable the CIFS server

setNetBIOSSMB(false);
setTcpipSMB(false);
setWin32NetBIOS(false);

setSMBServerEnabled(false);
}
catch (Throwable ex)
catch (Exception ex)
{
// Configuration error

logger.error("File server configuration error, " + ex.getMessage(), ex);
}

// Disable the CIFS server
// Initialize the CIFS and FTP servers, if the filesystem(s) initialized successfully

if ( filesysInitOK == true)
{
// Initialize the CIFS server

setNetBIOSSMB(false);
setTcpipSMB(false);
setWin32NetBIOS(false);
try
{
// Process the CIFS server configuration

config = configService.getConfig(ConfigCIFS, configCtx);
processCIFSServerConfig(config);

// Process the security configuration

config = configService.getConfig(ConfigSecurity, configCtx);
processSecurityConfig(config);

// Log the successful startup

logger.info("CIFS server started");
}
catch (UnsatisfiedLinkError ex)
{
// Error accessing the Win32NetBIOS DLL code

logger.error("Error accessing Win32 NetBIOS, check DLL is on the path");

// Disable the CIFS server

setNetBIOSSMB(false);
setTcpipSMB(false);
setWin32NetBIOS(false);

setSMBServerEnabled(false);
}
catch (Throwable ex)
{
// Configuration error

logger.error("CIFS server configuration error, " + ex.getMessage(), ex);

// Disable the CIFS server

setNetBIOSSMB(false);
setTcpipSMB(false);
setWin32NetBIOS(false);

setSMBServerEnabled(false);
}

setSMBServerEnabled(false);
// Initialize the FTP server

try
{
// Process the FTP server configuration

config = configService.getConfig(ConfigFTP, configCtx);
processFTPServerConfig(config);

// Log the successful startup

logger.info("FTP server started");
}
catch (Exception ex)
{
// Configuration error

logger.error("FTP server configuration error, " + ex.getMessage(), ex);
}
}
else
{
// Log the error

logger.error("CIFS and FTP servers not started due to filesystem initialization error");
}
}
@@ -174,7 +174,11 @@ public class DiskDeviceContext extends DeviceContext
*/
public void CloseContext()
{
// Close the notify handler

if ( hasChangeHandler())
getChangeHandler().shutdownRequest();

// Call the base class

super.CloseContext();
@@ -28,17 +28,18 @@ public final class FileSystem
// Filesystem attributes

public static final int CaseSensitiveSearch = 0x00000001;
public static final int CasePreservedNames = 0x00000002;
public static final int UnicodeOnDisk = 0x00000004;
public static final int PersistentACLs = 0x00000008;
public static final int FileCompression = 0x00000010;
public static final int VolumeQuotas = 0x00000020;
public static final int SparseFiles = 0x00000040;
public static final int ReparsePoints = 0x00000080;
public static final int RemoteStorage = 0x00000100;
public static final int VolumeIsCompressed = 0x00008000;
public static final int ObjectIds = 0x00010000;
public static final int Encryption = 0x00020000;
public static final int CasePreservedNames = 0x00000002;
public static final int UnicodeOnDisk = 0x00000004;
public static final int PersistentACLs = 0x00000008;
public static final int FileCompression = 0x00000010;
public static final int VolumeQuotas = 0x00000020;
public static final int SparseFiles = 0x00000040;
public static final int ReparsePoints = 0x00000080;
public static final int RemoteStorage = 0x00000100;
public static final int LFNAPISupport = 0x00004000;
public static final int VolumeIsCompressed = 0x00008000;
public static final int ObjectIds = 0x00010000;
public static final int Encryption = 0x00020000;

// Filesystem type strings
@@ -75,6 +75,10 @@ public abstract class HostAnnouncer extends Thread

private byte m_updateCount;

// Error count

private int m_errorCount;

// Shutdown flag, host announcer should remove the announced name as it shuts down

private boolean m_shutdown = false;
@@ -156,6 +160,16 @@
return m_names.numberOfStrings();
}

/**
* Return the error count
*
* @return int
*/
protected final int getErrorCount()
{
return m_errorCount;
}

/**
* Return the specified host name being announced.
*
@@ -493,6 +507,24 @@
m_srvtype = typ;
}

/**
* Increment the error count
*
* @return int
*/
protected final int incrementErrorCount()
{
return ++m_errorCount;
}

/**
* Clear the error count
*/
protected final void clearErrorCount()
{
m_errorCount = 0;
}

/**
* Shutdown the host announcer and remove the announced name from Network Neighborhood.
*/
@@ -32,6 +32,10 @@ import org.alfresco.filesys.smb.server.win32.Win32NetBIOSSessionSocketHandler;
public class Win32NetBIOSHostAnnouncer extends HostAnnouncer
{
// Number of send errors before marking the LANA as offline

private static final int SendErrorCount = 3;

// Associated session handler

Win32NetBIOSSessionSocketHandler m_handler;
@@ -120,6 +124,36 @@

int sts = Win32NetBIOS.SendDatagram(getLana(), getNameNumber(), destName, buf, 0, len);
if ( sts != NetBIOS.NRC_GoodRet)
logger.debug("Win32NetBIOS host announce error " + NetBIOS.getErrorString( -sts));
{
// Log the error

if ( logger.isErrorEnabled())
logger.error("Host announce error " + NetBIOS.getErrorString( -sts) +
" (LANA " + getLana() + ")");

// Update the error count

if ( incrementErrorCount() == SendErrorCount)
{
// Mark the LANA as offline

m_handler.lanaStatusChange( getLana(), false);

// Clear the error count

clearErrorCount();

// Log the error

if ( logger.isErrorEnabled())
logger.error("Marked LANA as unavailable due to send errors");
}
}
else
{
// Clear the error count

clearErrorCount();
}
}
}
@@ -16,6 +16,8 @@
*/
package org.alfresco.filesys.smb.mailslot;

import java.io.IOException;

import org.alfresco.filesys.netbios.NetBIOSName;
import org.alfresco.filesys.netbios.win32.NetBIOS;
import org.alfresco.filesys.netbios.win32.NetBIOSSocket;
@@ -34,6 +36,10 @@ import org.alfresco.filesys.smb.server.win32.Win32NetBIOSSessionSocketHandler;
*/
public class WinsockNetBIOSHostAnnouncer extends HostAnnouncer
{
// Number of send errors before marking the LANA as offline

private static final int SendErrorCount = 3;

// Associated session handler

private Win32NetBIOSSessionSocketHandler m_handler;
@@ -116,8 +122,49 @@

// Send the host announce datagram via the Win32 Netbios() API call

int sts = m_dgramSocket.sendDatagram(destNbName, buf, 0, len);
if ( sts != len)
logger.debug("WinsockNetBIOS host announce error");
boolean txOK = false;

try
{
int sts = m_dgramSocket.sendDatagram(destNbName, buf, 0, len);
if ( sts == len)
txOK = true;
}
catch ( IOException ex)
{
// Log the error

if ( logger.isErrorEnabled())
logger.error("Host announce error, " + ex.getMessage() + ", (LANA " + getLana() + ")");
}

// Check if the send was successful

if ( txOK == false)
{
// Update the error count

if ( incrementErrorCount() == SendErrorCount)
{
// Mark the LANA as offline

m_handler.lanaStatusChange( getLana(), false);

// Clear the error count

clearErrorCount();

// Log the error

if ( logger.isErrorEnabled())
logger.error("Marked LANA as unavailable due to send errors, (LANA " + getLana() + ")");
}
}
else
{
// Clear the error count

clearErrorCount();
}
}
}
@@ -40,24 +40,24 @@ class FindInfoPacker

// File information levels

public static final int InfoStandard = 1;
public static final int InfoQueryEASize = 2;
public static final int InfoQueryEAFromList = 3;
public static final int InfoDirectory = 0x101;
public static final int InfoFullDirectory = 0x102;
public static final int InfoNames = 0x103;
public static final int InfoDirectoryBoth = 0x104;
public static final int InfoMacHfsInfo = 0x302;
public static final int InfoStandard = 1;
public static final int InfoQueryEASize = 2;
public static final int InfoQueryEAFromList = 3;
public static final int InfoDirectory = 0x101;
public static final int InfoFullDirectory = 0x102;
public static final int InfoNames = 0x103;
public static final int InfoDirectoryBoth = 0x104;
public static final int InfoMacHfsInfo = 0x302;

// File information fixed lengths, includes nulls on strings.

public static final int InfoStandardLen = 24;
public static final int InfoQueryEASizeLen = 28;
public static final int InfoDirectoryLen = 64;
public static final int InfoFullDirectoryLen = 68;
public static final int InfoNamesLen = 12;
public static final int InfoDirectoryBothLen = 94;
public static final int InfoMacHfsLen = 120;
public static final int InfoStandardLen = 24;
public static final int InfoQueryEASizeLen = 28;
public static final int InfoDirectoryLen = 64;
public static final int InfoFullDirectoryLen = 68;
public static final int InfoNamesLen = 12;
public static final int InfoDirectoryBothLen = 94;
public static final int InfoMacHfsLen = 120;

/**
* Pack a file information object into the specified buffer, using information level 1 format.
@@ -426,7 +426,7 @@

// Align the buffer pointer and set the offset to the next file information entry

buf.longwordAlign();
buf.wordAlign();

int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -518,7 +518,7 @@

// Align the buffer pointer and set the offset to the next file information entry

buf.longwordAlign();
buf.wordAlign();

int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -615,7 +615,7 @@

// Align the buffer pointer and set the offset to the next file information entry

buf.longwordAlign();
buf.wordAlign();

int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -718,7 +718,7 @@

// Align the buffer pointer and set the offset to the next file information entry

buf.longwordAlign();
buf.wordAlign();

int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -839,7 +839,7 @@

// Align the buffer pointer and set the offset to the next file information entry

buf.longwordAlign();
buf.wordAlign();

int curPos = buf.getPosition();
buf.setPosition(startPos);
@ -6576,41 +6576,12 @@ public class NTProtocolHandler extends CoreProtocolHandler
                return;
            }

            // Check if this is a buffer length check, if so the maximum returned data count will be
            // zero
            // Return an empty security descriptor

            byte[] paramblk = new byte[4];
            DataPacker.putIntelInt(0, paramblk, 0);

            if (tbuf.getReturnDataLimit() == 0)
            {

                // Return the security descriptor length in the parameter block

                byte[] paramblk = new byte[4];
                DataPacker.putIntelInt(_sdEveryOne.length, paramblk, 0);

                // Initialize the transaction reply

                outPkt.initTransactReply(paramblk, paramblk.length, null, 0);

                // Set a warning status to indicate the supplied data buffer was too small to return the
                // security
                // descriptor

                outPkt.setLongErrorCode(SMBStatus.NTBufferTooSmall);
            }
            else
            {

                // Return the security descriptor length in the parameter block

                byte[] paramblk = new byte[4];
                DataPacker.putIntelInt(_sdEveryOne.length, paramblk, 0);

                // Initialize the transaction reply. Return the fixed security descriptor that allows
                // anyone to access the
                // file/directory

                outPkt.initTransactReply(paramblk, paramblk.length, _sdEveryOne, _sdEveryOne.length);
            }
            outPkt.initTransactReply(paramblk, paramblk.length, null, 0);

            // Send back the response
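The parameter block above is packed in little-endian byte order. A minimal, runnable sketch of that packing, with an illustrative stand-in for Alfresco's DataPacker helper (not the real implementation):

public class LittleEndian
{
    // Write a 32-bit value into buf at offset, least significant byte first ("Intel" order)
    public static void putIntelInt(int value, byte[] buf, int offset)
    {
        buf[offset]     = (byte) (value & 0xFF);
        buf[offset + 1] = (byte) ((value >> 8) & 0xFF);
        buf[offset + 2] = (byte) ((value >> 16) & 0xFF);
        buf[offset + 3] = (byte) ((value >> 24) & 0xFF);
    }

    public static void main(String[] args)
    {
        // an all-zero parameter block, as built in the hunk above
        byte[] paramblk = new byte[4];
        putIntelInt(0, paramblk, 0);
    }
}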
@ -78,7 +78,7 @@ public class SMBServer extends NetworkFileServer implements Runnable

    // Server type flags, used when announcing the host

    private int m_srvType = ServerType.WorkStation + ServerType.Server;
    private int m_srvType = ServerType.WorkStation + ServerType.Server + ServerType.NTServer;

    // Next available session id
@ -311,7 +311,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface

            // Set parameters

            context.setFilesystemAttributes(FileSystem.CasePreservedNames);
            context.setFilesystemAttributes(FileSystem.CasePreservedNames + FileSystem.UnicodeOnDisk +
                    FileSystem.CaseSensitiveSearch);
        }
        catch (Exception ex)
        {
@ -597,10 +598,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
        {
            // a valid use case
            if (logger.isDebugEnabled())
            {
                logger.debug("Getting file information - File not found: \n" +
                        "   path: " + path);
            }
            throw e;
        }
        catch (org.alfresco.repo.security.permissions.AccessDeniedException ex)
@ -1448,7 +1447,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface

        try
        {
            // get the node
            // Get the node

            NodeRef nodeRef = getNodeForPath(tree, name);
            if (nodeService.exists(nodeRef))
            {
@ -1468,15 +1468,6 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
                        "   node: " + nodeRef);
            }
        }
        catch (FileNotFoundException e)
        {
            // already gone
            if (logger.isDebugEnabled())
            {
                logger.debug("Deleted file <alfready gone>: \n" +
                        "   file: " + name);
            }
        }
        catch (NodeLockedException ex)
        {
            // Debug
@ -1610,8 +1601,11 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface

                // DEBUG

                if ( logger.isDebugEnabled())
                if ( logger.isDebugEnabled())
                {
                    logger.debug("Cached rename state for " + oldName + ", state=" + fstate);
                    logger.debug("  new name " + newName + ", state=" + newState);
                }
            }
        }
        else
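The filesystem attribute constants combined above are bit flags, so adding them only works while every flag is a distinct bit; OR-ing them is the conventional, safer combination. A minimal sketch with illustrative flag values (the real FileSystem constants may differ):

public class FileSystemFlags
{
    // illustrative values; the actual Alfresco FileSystem constants are assumptions here
    public static final int CaseSensitiveSearch = 0x0001;
    public static final int CasePreservedNames  = 0x0002;
    public static final int UnicodeOnDisk       = 0x0004;

    public static void main(String[] args)
    {
        // equivalent to the '+' combination while the bits stay distinct
        int attributes = CasePreservedNames | UnicodeOnDisk | CaseSensitiveSearch;
        System.out.printf("attributes = 0x%04X%n", attributes);
    }
}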
@ -20,7 +20,6 @@ import org.alfresco.filesys.locking.FileLock;
import org.alfresco.filesys.locking.FileLockList;
import org.alfresco.filesys.locking.LockConflictException;
import org.alfresco.filesys.locking.NotLockedException;
import org.alfresco.filesys.server.filesys.FileName;
import org.alfresco.filesys.server.filesys.FileOpenParams;
import org.alfresco.filesys.server.filesys.FileStatus;
import org.alfresco.filesys.smb.SharingMode;
@ -587,35 +586,7 @@ public class FileState
     */
    public final static String normalizePath(String path)
    {

        // Split the path into directories and file name, only uppercase the directories to
        // normalize the path.

        String normPath = path;

        if (path.length() > 3)
        {

            // Split the path to seperate the folders/file name

            int pos = path.lastIndexOf(FileName.DOS_SEPERATOR);
            if (pos != -1)
            {

                // Get the path and file name parts, normalize the path

                String pathPart = path.substring(0, pos).toUpperCase();
                String namePart = path.substring(pos);

                // Rebuild the path string

                normPath = pathPart + namePart;
            }
        }

        // Return the normalized path

        return normPath;
        return path.toUpperCase();
    }

    /**
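Note the semantic change in the hunk above: the old code uppercased only the folder part and preserved the case of the file name, while the replacement uppercases the whole path. A minimal, runnable illustration of the difference (the DOS separator '\\' is assumed here, standing in for FileName.DOS_SEPERATOR):

public class NormalizePathDemo
{
    public static void main(String[] args)
    {
        String path = "\\folder\\File.txt";

        // old behaviour: uppercase the folders, keep the file name's case
        int pos = path.lastIndexOf('\\');
        String oldStyle = path.substring(0, pos).toUpperCase() + path.substring(pos);

        // new behaviour: uppercase everything
        String newStyle = path.toUpperCase();

        System.out.println(oldStyle); // \FOLDER\File.txt
        System.out.println(newStyle); // \FOLDER\FILE.TXT
    }
}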
@ -448,7 +448,7 @@ public class FileStateTable implements Runnable
        // Dump the file state cache entries to the specified stream

        if (m_stateTable.size() > 0)
            logger.info("++ FileStateCache Entries:");
            logger.debug("++ FileStateCache Entries:");

        Enumeration enm = m_stateTable.keys();
        long curTime = System.currentTimeMillis();
@ -458,7 +458,7 @@ public class FileStateTable implements Runnable
            String fname = (String) enm.nextElement();
            FileState state = m_stateTable.get(fname);

            logger.info("  ++ " + fname + "(" + state.getSecondsToExpire(curTime) + ") : " + state);
            logger.debug("  ++ " + fname + "(" + state.getSecondsToExpire(curTime) + ") : " + state);
        }
    }
}
@ -39,7 +39,7 @@ public class Win32NetBIOSLanaMonitor extends Thread
    //
    // Initial LANA listener array size

    private static final int LanaListenerArraySize = 16;
    private static final int LanaListenerArraySize = 256;

    // Debug logging

@ -153,24 +153,7 @@ public class Win32NetBIOSLanaMonitor extends Thread
        // Check if the listener array has been allocated

        if ( m_listeners == null)
        {
            int len = LanaListenerArraySize;
            if ( lana > len)
                len = (lana + 3) & 0x00FC;

            m_listeners = new LanaListener[len];
        }
        else if ( lana >= m_listeners.length)
        {
            // Extend the LANA listener array

            LanaListener[] newArray = new LanaListener[(lana + 3) & 0x00FC];

            // Copy the existing array to the extended array

            System.arraycopy(m_listeners, 0, newArray, 0, m_listeners.length);
            m_listeners = newArray;
        }
            m_listeners = new LanaListener[LanaListenerArraySize];

        // Add the LANA listener

@ -343,6 +326,10 @@ public class Win32NetBIOSLanaMonitor extends Thread

                m_lanas.set(lana);
                m_lanaSts.set(lana, true);

                // Add a listener for the new LANA

                addLanaListener( sessHandler.getLANANumber(), sessHandler);
            }
        }
        else
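The removed growth expression (lana + 3) & 0x00FC rounds lana up to the next multiple of four, except that when lana is already a multiple of four it yields lana itself, one slot short for a zero-based index; that boundary case may be part of why a fixed 256-slot array was adopted (an assumption, not stated in the commit). A quick standalone check of the rounding:

public class LanaRounding
{
    public static void main(String[] args)
    {
        for (int lana : new int[] { 1, 4, 10, 16, 250 })
        {
            // prints: 1 -> 4, 4 -> 4, 10 -> 12, 16 -> 16, 250 -> 252
            System.out.println(lana + " -> " + ((lana + 3) & 0x00FC));
        }
    }
}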
@ -645,19 +645,29 @@ public class SessionImpl implements Session
        if (isLive())
        {
            // invalidate authentication
            getRepositoryImpl().getServiceRegistry().getAuthenticationService().invalidateTicket(getTicket());
            ticket = null;

            // clean up resources
            try
            {
                sessionIsolation.rollback();
                try
                {
                    getRepositoryImpl().getServiceRegistry().getAuthenticationService().invalidateTicket(getTicket());
                }
                finally
                {
                    try
                    {
                        sessionIsolation.rollback();
                    }
                    catch(RepositoryException e)
                    {
                        // continue execution and force logout
                    }
                }
            }
            catch(RepositoryException e)
            finally
            {
                // force logout
                ticket = null;
                repository.deregisterSession();
            }
            repository.deregisterSession();
        }
    }
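The rewrite above nests try/finally blocks so that each cleanup step runs even if an earlier one throws: invalidate the ticket first, roll back regardless, and always clear the ticket and deregister the session last. A minimal, runnable sketch of that ordering with simple stand-ins for the Alfresco services:

public class LogoutOrdering
{
    public static void main(String[] args)
    {
        try
        {
            try
            {
                System.out.println("1: invalidate ticket");
            }
            finally
            {
                try
                {
                    System.out.println("2: roll back session isolation");
                }
                catch (RuntimeException e)
                {
                    // continue execution and force logout
                }
            }
        }
        finally
        {
            // runs no matter which earlier step failed
            System.out.println("3: clear ticket and deregister session");
        }
    }
}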
@ -94,5 +94,29 @@ public class SessionImplTest extends BaseJCRTest
        assertFalse(isLive);
    }

    public void testSessionThread()
    {
        SimpleCredentials superuser = new SimpleCredentials("superuser", "".toCharArray());
        try
        {
            Session anotherSession = repository.login(superuser, getWorkspace());
            fail("Exception not thrown when establishing two sessions on same thread");
        }
        catch(RepositoryException e)
        {
            // successful - multiple sessions on one thread caught
        }
        superuserSession.logout();
        try
        {
            Session anotherSession = repository.login(superuser, getWorkspace());
            anotherSession.logout();
        }
        catch(RepositoryException e)
        {
            fail("Exception thrown when it shouldn't have been.");
        }
    }

}
@ -18,7 +18,6 @@ package org.alfresco.repo.action.executer;

import java.util.List;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ParameterDefinitionImpl;
import org.alfresco.service.cmr.action.Action;
@ -168,6 +167,10 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
            overwrite = overwriteValue.booleanValue();
        }

        // Calculate the destination name
        String originalName = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_NAME);
        String newName = transformName(originalName, mimeType);

        // Since we are overwriting we need to figure out whether the destination node exists
        NodeRef copyNodeRef = null;
        if (overwrite == true)
@ -182,9 +185,10 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
                if (this.nodeService.hasAspect(copy, ContentModel.ASPECT_WORKING_COPY) == false)
                {
                    // We can assume that we are looking for a node created by this action so the primary parent will
                    // match the destination folder
                    // match the destination folder and the name will be the same
                    NodeRef parent = this.nodeService.getPrimaryParent(copy).getParentRef();
                    if (parent.equals(destinationParent) == true)
                    String copyName = (String)this.nodeService.getProperty(copy, ContentModel.PROP_NAME);
                    if (parent.equals(destinationParent) == true && copyName.equals(newName) == true)
                    {
                        if (copyNodeRef == null)
                        {
@ -212,58 +216,46 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
                    destinationAssocQName,
                    false);
            newCopy = true;
        }
    }

    // Get the content reader
    ContentReader contentReader = this.contentService.getReader(actionedUponNodeRef, ContentModel.PROP_CONTENT);
    if (contentReader == null)
    {
        // for some reason, this action is premature
        throw new AlfrescoRuntimeException(
                "Attempting to execute content transformation rule " +
                "but content has not finished writing, i.e. no URL is available.");
    }
    String originalMimetype = contentReader.getMimetype();

    // get the writer and set it up
    ContentWriter contentWriter = this.contentService.getWriter(copyNodeRef, ContentModel.PROP_CONTENT, true);
    contentWriter.setMimetype(mimeType); // new mimetype
    contentWriter.setEncoding(contentReader.getEncoding()); // original encoding

    if (newCopy == true)
    {
        // Adjust the name of the copy
        String originalName = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_NAME);
        String newName = transformName(originalName, originalMimetype, mimeType);
        nodeService.setProperty(copyNodeRef, ContentModel.PROP_NAME, newName);
        String originalTitle = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_TITLE);
        if (originalTitle != null && originalTitle.length() > 0)
        {
            String newTitle = transformName(originalTitle, originalMimetype, mimeType);
            String newTitle = transformName(originalTitle, mimeType);
            nodeService.setProperty(copyNodeRef, ContentModel.PROP_TITLE, newTitle);
        }
    }

    // Try and transform the content
    try

    // Get the content reader
    ContentReader contentReader = this.contentService.getReader(actionedUponNodeRef, ContentModel.PROP_CONTENT);
    // Only do the transformation if some content is available
    if (contentReader != null)
    {
        doTransform(ruleAction, contentReader, contentWriter);
    }
    catch(NoTransformerException e)
    {
        if (logger.isDebugEnabled())
        // get the writer and set it up
        ContentWriter contentWriter = this.contentService.getWriter(copyNodeRef, ContentModel.PROP_CONTENT, true);
        contentWriter.setMimetype(mimeType); // new mimetype
        contentWriter.setEncoding(contentReader.getEncoding()); // original encoding

        // Try and transform the content
        try
        {
            logger.debug("No transformer found to execute rule: \n" +
                    "   reader: " + contentReader + "\n" +
                    "   writer: " + contentWriter + "\n" +
                    "   action: " + this);
            doTransform(ruleAction, contentReader, contentWriter);
        }
        //if (newCopy == true)
        //{
        // TODO: Revisit this for alternative solutions
        //    nodeService.deleteNode(copyNodeRef);
        // }
        catch(NoTransformerException e)
        {
            if (logger.isDebugEnabled())
            {
                logger.debug("No transformer found to execute rule: \n" +
                        "   reader: " + contentReader + "\n" +
                        "   writer: " + contentWriter + "\n" +
                        "   action: " + this);
            }
        }
    }
}

protected void doTransform(Action ruleAction, ContentReader contentReader, ContentWriter contentWriter)
@ -279,7 +271,7 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
     * @param newMimetype
     * @return
     */
    private String transformName(String original, String originalMimetype, String newMimetype)
    private String transformName(String original, String newMimetype)
    {
        // get the current extension
        int dotIndex = original.lastIndexOf('.');
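transformName above renames the result to match the new mimetype by swapping the file extension. A minimal standalone sketch of that renaming rule, with the extension passed in directly where the real code would ask the MimetypeService:

public class TransformNameDemo
{
    static String transformName(String original, String newExtension)
    {
        int dotIndex = original.lastIndexOf('.');
        if (dotIndex == -1)
        {
            // no extension present: append one
            return original + "." + newExtension;
        }
        // replace everything after the last dot
        return original.substring(0, dotIndex + 1) + newExtension;
    }

    public static void main(String[] args)
    {
        System.out.println(transformName("report.doc", "pdf")); // report.pdf
        System.out.println(transformName("README", "txt"));     // README.txt
    }
}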
@ -25,14 +25,14 @@ import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.security.authority.AuthorityDAOImpl;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;

public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
{
    private static final String MSG_RESULT = "patch.topLevelGroupParentChildAssociationTypePatch.result";
    private static final String ERR_SYS_PATH_NOT_FOUND = "patch.topLevelGroupParentChildAssociationTypePatch.err.sys_path_not_found";
    private static final String ERR_AUTH_PATH_NOT_FOUND = "patch.topLevelGroupParentChildAssociationTypePatch.err.auth_path_not_found";

    public TopLevelGroupParentChildAssociationTypePatch()
    {
@ -48,12 +48,15 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
        {
            if (!car.getTypeQName().equals(ContentModel.ASSOC_CHILDREN))
            {
                nodeService
                        .moveNode(car.getChildRef(), car.getParentRef(), ContentModel.ASSOC_CHILDREN, car.getQName());
                nodeService.moveNode(
                        car.getChildRef(),
                        car.getParentRef(),
                        ContentModel.ASSOC_CHILDREN,
                        car.getQName());
            }
        }

        return I18NUtil.getMessage(MSG_RESULT);
        return I18NUtil.getMessage(MSG_RESULT, results.size());
    }

    private NodeRef getAuthorityContainer()
@ -68,7 +71,7 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
        NodeRef sysNodeRef = null;
        if (results.size() == 0)
        {
            throw new AlfrescoRuntimeException("Required authority system path not found: " + qnameAssocSystem);
            throw new AlfrescoRuntimeException(ERR_SYS_PATH_NOT_FOUND, new Object[] {qnameAssocSystem});
        }
        else
        {
@ -78,7 +81,7 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
        NodeRef authNodeRef = null;
        if (results.size() == 0)
        {
            throw new AlfrescoRuntimeException("Required authority path not found: " + qnameAssocAuthorities);
            throw new AlfrescoRuntimeException(ERR_AUTH_PATH_NOT_FOUND, new Object[] {qnameAssocAuthorities});
        }
        else
        {
@ -140,7 +140,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
    }

    /**
     * Sets the authenticatin service
     * Sets the authentication service
     *
     * @param authenticationService  the authentication service
     */
@ -244,7 +244,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
        workingCopyProperties.put(ContentModel.PROP_WORKING_COPY_OWNER, userName);
        this.nodeService.addAspect(workingCopy, ContentModel.ASPECT_WORKING_COPY, workingCopyProperties);

        // Lock the origional node
        // Lock the original node
        this.lockService.lock(nodeRef, LockType.READ_ONLY_LOCK);

        // Return the working copy
@ -307,11 +307,11 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
        try
        {
            Map<QName, Serializable> workingCopyProperties = nodeService.getProperties(workingCopyNodeRef);
            // Try and get the origional node reference
            // Try and get the original node reference
            nodeRef = (NodeRef) workingCopyProperties.get(ContentModel.PROP_COPY_REFERENCE);
            if(nodeRef == null)
            {
                // Error since the origional node can not be found
                // Error since the original node can not be found
                throw new CheckOutCheckInServiceException(MSG_ERR_BAD_COPY);
            }

@ -347,7 +347,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
                    contentData);
        }

        // Copy the contents of the working copy onto the origional
        // Copy the contents of the working copy onto the original
        this.copyService.copy(workingCopyNodeRef, nodeRef);

        if (versionProperties != null && this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true)
@ -364,7 +364,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
        }
        else
        {
            // Re-lock the origional node
            // Re-lock the original node
            this.lockService.lock(nodeRef, LockType.READ_ONLY_LOCK);
        }
    }
@ -421,15 +421,15 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
        // Ensure that the node has the copy aspect
        if (this.nodeService.hasAspect(workingCopyNodeRef, ContentModel.ASPECT_COPIEDFROM) == true)
        {
            // Get the origional node
            // Get the original node
            nodeRef = (NodeRef)this.nodeService.getProperty(workingCopyNodeRef, ContentModel.PROP_COPY_REFERENCE);
            if (nodeRef == null)
            {
                // Error since the origional node can not be found
                // Error since the original node can not be found
                throw new CheckOutCheckInServiceException(MSG_ERR_BAD_COPY);
            }

            // Release the lock on the origional node
            // Release the lock on the original node
            this.lockService.unlock(nodeRef);

            // Delete the working copy
@ -452,7 +452,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
        {
            NodeRef workingCopy = null;

            // Do a search to find the origional document
            // Do a search to find the working copy document
            ResultSet resultSet = null;
            try
            {
@ -40,6 +40,8 @@ import org.apache.commons.logging.LogFactory;
 */
public class MimetypeMap implements MimetypeService
{
    public static final String EXTENSION_BINARY = "bin";

    public static final String MIMETYPE_TEXT_PLAIN = "text/plain";
    public static final String MIMETYPE_TEXT_CSS = "text/css";
    public static final String MIMETYPE_XML = "text/xml";
@ -87,6 +89,8 @@ public class MimetypeMap implements MimetypeService
    public static final String MIMETYPE_STAROFFICE5_WRITER = "application/vnd.stardivision.writer";
    public static final String MIMETYPE_STAROFFICE5_WRITER_GLOBAL = "application/vnd.stardivision.writer-global";
    public static final String MIMETYPE_STAROFFICE5_MATH = "application/vnd.stardivision.math";
    // WordPerfect
    public static final String MIMETYPE_WORDPERFECT = "application/wordperfect";
    // Audio
    public static final String MIMETYPE_MP3 = "audio/x-mpeg";
    // Alfresco
@ -207,18 +211,26 @@ public class MimetypeMap implements MimetypeService
    }

    /**
     * Get the file extension associated with the mimetype.
     *
     * @param mimetype a valid mimetype
     * @return Returns the default extension for the mimetype
     * @throws AlfrescoRuntimeException if the mimetype doesn't exist
     * @return Returns the default extension for the mimetype.  Returns the {@link #MIMETYPE_BINARY binary}
     *      mimetype extension.
     *
     * @see #MIMETYPE_BINARY
     * @see #EXTENSION_BINARY
     */
    public String getExtension(String mimetype)
    {
        String extension = extensionsByMimetype.get(mimetype);
        if (extension == null)
        {
            throw new AlfrescoRuntimeException("No extension available for mimetype: " + mimetype);
            return EXTENSION_BINARY;
        }
        else
        {
            return extension;
        }
        return extension;
    }

    public Map<String, String> getDisplaysByExtension()
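After this change getExtension never throws for an unknown mimetype; callers simply receive the generic binary extension. A minimal, runnable sketch of the new lookup-with-fallback behaviour (the map content here is illustrative):

import java.util.HashMap;
import java.util.Map;

public class ExtensionLookup
{
    public static final String EXTENSION_BINARY = "bin";

    private static final Map<String, String> extensionsByMimetype = new HashMap<String, String>();
    static
    {
        extensionsByMimetype.put("text/plain", "txt");
    }

    public static String getExtension(String mimetype)
    {
        // unknown mimetypes fall back to the binary extension instead of throwing
        String extension = extensionsByMimetype.get(mimetype);
        return extension == null ? EXTENSION_BINARY : extension;
    }

    public static void main(String[] args)
    {
        System.out.println(getExtension("text/plain"));        // txt
        System.out.println(getExtension("application/x-foo")); // bin
    }
}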
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -31,11 +31,11 @@ import org.apache.commons.logging.LogFactory;

/**
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
abstract public class AbstractMetadataExtracter implements MetadataExtracter
{
    private static Log logger = LogFactory.getLog(AbstractMetadataExtracter.class);
    protected static Log logger = LogFactory.getLog(AbstractMetadataExtracter.class);

    private MimetypeService mimetypeService;
    private MetadataExtracterRegistry registry;
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -38,7 +38,7 @@ import org.springframework.context.ApplicationContext;
 * @see org.alfresco.repo.content.metadata.MetadataExtracter
 * @see org.alfresco.repo.content.metadata.AbstractMetadataExtracter
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public abstract class AbstractMetadataExtracterTest extends TestCase
{
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -38,7 +38,7 @@ import org.alfresco.service.namespace.QName;

/**
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class HtmlMetadataExtracter extends AbstractMetadataExtracter
{
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -19,7 +19,7 @@ package org.alfresco.repo.content.metadata;
import org.alfresco.repo.content.MimetypeMap;

/**
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class HtmlMetadataExtracterTest extends AbstractMetadataExtracterTest
{
@ -0,0 +1,180 @@
/*
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.content.metadata;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
import org.apache.poi.poifs.filesystem.DocumentInputStream;

/**
 * Outlook format email meta-data extractor
 *
 * @author Kevin Roast
 */
public class MailMetadataExtracter extends AbstractMetadataExtracter
{
    public static String[] SUPPORTED_MIMETYPES = new String[] {
        "message/rfc822"};

    private static final String SUBSTG_MESSAGEBODY = "__substg1.0_1000001E";
    private static final String SUBSTG_RECIPIENTEMAIL = "__substg1.0_39FE001E";
    private static final String SUBSTG_RECEIVEDEMAIL = "__substg1.0_0076001E";
    private static final String SUBSTG_SENDEREMAIL = "__substg1.0_0C1F001E";
    private static final String SUBSTG_DATE = "__substg1.0_00470102";

    private static final QName ASPECT_MAILED = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "emailed");
    private static final QName PROP_SENTDATE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "sentdate");
    private static final QName PROP_ORIGINATOR = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "originator");
    private static final QName PROP_ADDRESSEE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "addressee");
    private static final QName PROP_ADDRESSEES = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "addressees");

    // the CC: email addresses
    private ThreadLocal<List<String>> receipientEmails = new ThreadLocal<List<String>>();

    public MailMetadataExtracter()
    {
        super(new HashSet<String>(Arrays.asList(SUPPORTED_MIMETYPES)), 1.0, 1000);
    }

    public void extractInternal(ContentReader reader, final Map<QName, Serializable> destination) throws Throwable
    {
        POIFSReaderListener readerListener = new POIFSReaderListener()
        {
            public void processPOIFSReaderEvent(final POIFSReaderEvent event)
            {
                try
                {
                    String name = event.getName();

                    if (name.equals(SUBSTG_RECIPIENTEMAIL))      // a recipient email address
                    {
                        String emailAddress = readPlainTextStream(event.getStream());
                        receipientEmails.get().add(convertExchangeAddress(emailAddress));
                    }
                    else if (name.equals(SUBSTG_RECEIVEDEMAIL))  // receiver email address
                    {
                        String emailAddress = readPlainTextStream(event.getStream());
                        destination.put(PROP_ADDRESSEE, convertExchangeAddress(emailAddress));
                    }
                    else if (name.equals(SUBSTG_SENDEREMAIL))    // sender email - NOTE either email OR full Exchange data e.g. : /O=HOSTEDSERVICE2/OU=FIRST ADMINISTRATIVE GROUP/CN=RECIPIENTS/CN=MIKE.FARMAN@BEN
                    {
                        String emailAddress = readPlainTextStream(event.getStream());
                        destination.put(PROP_ORIGINATOR, convertExchangeAddress(emailAddress));
                    }
                    else if (name.equals(SUBSTG_DATE))
                    {
                        // the date is not really plain text - but it's easier to parse as such
                        String date = readPlainTextStream(event.getStream());
                        int valueIndex = date.indexOf("l=");
                        if (valueIndex != -1)
                        {
                            int dateIndex = date.indexOf('-', valueIndex);
                            if (dateIndex != -1)
                            {
                                dateIndex++;
                                String strYear = date.substring(dateIndex, dateIndex + 2);
                                int year = Integer.parseInt(strYear) + (2000 - 1900);
                                String strMonth = date.substring(dateIndex + 2, dateIndex + 4);
                                int month = Integer.parseInt(strMonth) - 1;
                                String strDay = date.substring(dateIndex + 4, dateIndex + 6);
                                int day = Integer.parseInt(strDay);
                                String strHour = date.substring(dateIndex + 6, dateIndex + 8);
                                int hour = Integer.parseInt(strHour);
                                String strMinute = date.substring(dateIndex + 10, dateIndex + 12);
                                int minute = Integer.parseInt(strMinute);
                                destination.put(PROP_SENTDATE, new Date(year, month, day, hour, minute));
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    throw new ContentIOException("Property set stream: " + event.getPath() + event.getName(), ex);
                }
            }
        };

        InputStream is = null;
        try
        {
            this.receipientEmails.set(new ArrayList<String>());

            is = reader.getContentInputStream();
            POIFSReader poiFSReader = new POIFSReader();
            poiFSReader.registerListener(readerListener);

            try
            {
                poiFSReader.read(is);
            }
            catch (IOException err)
            {
                // probably not an Outlook format MSG - ignore for now
                logger.warn("Unable to extract meta-data from message: " + err.getMessage());
            }

            // store multi-value extracted property
            if (receipientEmails.get().size() != 0)
            {
                destination.put(PROP_ADDRESSEES, (Serializable)receipientEmails.get());
            }
        }
        finally
        {
            if (is != null)
            {
                try { is.close(); } catch (IOException e) {}
            }
        }
    }

    private static String readPlainTextStream(DocumentInputStream stream)
        throws IOException
    {
        byte[] data = new byte[stream.available()];
        int read = stream.read(data);
        return new String(data);
    }

    private static String convertExchangeAddress(String email)
    {
        if (email.lastIndexOf("/CN=") == -1)
        {
            return email;
        }
        else
        {
            // found a full Exchange format To header
            return email.substring(email.lastIndexOf("/CN=") + 4);
        }
    }
}
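The extracter above uses POI's event-driven POIFS reader: the reader walks the OLE2 container and fires one callback per document stream, so the listener can pick out the substorage names it cares about without loading the whole .msg file into memory. A minimal sketch of that pattern, assuming the POI event-filesystem API as imported above (the file name is illustrative):

import java.io.FileInputStream;
import java.io.IOException;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;

public class PoifsStreamLister
{
    public static void main(String[] args) throws IOException
    {
        POIFSReader reader = new POIFSReader();
        reader.registerListener(new POIFSReaderListener()
        {
            public void processPOIFSReaderEvent(POIFSReaderEvent event)
            {
                // e.g. "__substg1.0_0C1F001E" carries the sender address in a .msg file
                System.out.println(event.getPath() + event.getName());
            }
        });
        reader.read(new FileInputStream("message.msg"));
    }
}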
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -25,7 +25,7 @@ import org.alfresco.service.namespace.QName;

/**
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public interface MetadataExtracter
{
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -36,7 +36,7 @@ import org.apache.commons.logging.LogFactory;
 * The extracters themselves know how well they are able to extract metadata.
 *
 * @see org.alfresco.repo.content.metadata.MetadataExtracter
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class MetadataExtracterRegistry
{
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -28,7 +28,6 @@ import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.QName;
import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.SummaryInformation;
@ -37,15 +36,16 @@ import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;

/**
 * Office file format Metadata Extracter
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class OfficeMetadataExtracter extends AbstractMetadataExtracter
{
    public static String[] SUPPORTED_MIMETYPES = new String[] {
        MimetypeMap.MIMETYPE_WORD,
        MimetypeMap.MIMETYPE_EXCEL,
        MimetypeMap.MIMETYPE_PPT };
        MimetypeMap.MIMETYPE_PPT};

    public OfficeMetadataExtracter()
    {
@ -64,6 +64,7 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
                    if (ps instanceof SummaryInformation)
                    {
                        SummaryInformation si = (SummaryInformation) ps;

                        // Titled aspect
                        trimPut(ContentModel.PROP_TITLE, si.getTitle(), destination);
                        trimPut(ContentModel.PROP_DESCRIPTION, si.getSubject(), destination);
@ -73,16 +74,6 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
                        trimPut(ContentModel.PROP_MODIFIED, si.getLastSaveDateTime(), destination);
                        trimPut(ContentModel.PROP_AUTHOR, si.getAuthor(), destination);
                    }
                    else if (ps instanceof DocumentSummaryInformation)
                    {
                        // DocumentSummaryInformation dsi = (DocumentSummaryInformation) ps;

                        // These are not really interesting to any aspect:
                        // trimPut(ContentModel.PROP_xxx, dsi.getCompany(),
                        // destination);
                        // trimPut(ContentModel.PROP_yyy, dsi.getManager(),
                        // destination);
                    }
                }
                catch (Exception ex)
                {
@ -90,6 +81,7 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
                }
            }
        };

        InputStream is = null;
        try
        {
@ -4,7 +4,7 @@ package org.alfresco.repo.content.metadata;
/**
 * @see org.alfresco.repo.content.transform.OfficeMetadataExtracter
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class OfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
{
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -24,12 +24,13 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;

import net.sf.joott.uno.UnoConnection;
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.TempFileProvider;

import com.sun.star.beans.PropertyValue;
@ -41,9 +42,9 @@ import com.sun.star.ucb.XFileIdentifierConverter;
import com.sun.star.uno.UnoRuntime;

/**
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class UnoMetadataExtracter extends AbstractMetadataExtracter
public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
{
    public static String[] SUPPORTED_MIMETYPES = new String[] {
        MimetypeMap.MIMETYPE_STAROFFICE5_WRITER,
@ -55,31 +56,26 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
        // quality since they involve conversion.
    };

    private String contentUrl;
    private MyUnoConnection connection;
    private OpenOfficeConnection connection;
    private boolean isConnected;

    public UnoMetadataExtracter()
    public OpenOfficeMetadataExtracter()
    {
        super(new HashSet<String>(Arrays.asList(SUPPORTED_MIMETYPES)), 1.00, 10000);
        this.contentUrl = UnoConnection.DEFAULT_CONNECTION_STRING;
    }

    /**
     *
     * @param contentUrl the URL to connect to
     */
    public void setContentUrl(String contentUrl)
    public void setConnection(OpenOfficeConnection connection)
    {
        this.contentUrl = contentUrl;
        this.connection = connection;
    }

    /**
     * Initialises the bean by establishing an UNO connection
     */
    public synchronized void init()
    {
        connection = new MyUnoConnection(contentUrl);
        PropertyCheck.mandatory("OpenOfficeMetadataExtracter", "connection", connection);

        // attempt to make an connection
        try
        {
@ -109,7 +105,7 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter

        // create temporary files to convert from and to
        File tempFromFile = TempFileProvider.createTempFile(
                "UnoContentTransformer_", "."
                "OpenOfficeMetadataExtracter-", "."
                + getMimetypeService().getExtension(sourceMimetype));
        // download the content from the source reader
        reader.getContent(tempFromFile);
@ -158,9 +154,9 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
        }
    }

    public String toUrl(File file, MyUnoConnection connection) throws ConnectException
    public String toUrl(File file, OpenOfficeConnection connection) throws ConnectException
    {
        Object fcp = connection.getFileContentService();
        Object fcp = connection.getFileContentProvider();
        XFileIdentifierConverter fic = (XFileIdentifierConverter) UnoRuntime.queryInterface(
                XFileIdentifierConverter.class, fcp);
        return fic.getFileURLFromSystemPath("", file.getAbsolutePath());
@ -181,17 +177,4 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
        property.Value = value;
        return property;
    }

    static class MyUnoConnection extends UnoConnection
    {
        public MyUnoConnection(String url)
        {
            super(url);
        }

        public Object getFileContentService() throws ConnectException
        {
            return getService("com.sun.star.ucb.FileContentProvider");
        }
    }
}
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -16,20 +16,27 @@
 */
package org.alfresco.repo.content.metadata;

import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;

/**
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
public class OpenOfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
{
    private UnoMetadataExtracter extracter;
    private OpenOfficeMetadataExtracter extracter;

    @Override
    public void setUp() throws Exception
    {
        super.setUp();
        extracter = new UnoMetadataExtracter();

        OpenOfficeConnection connection = new SocketOpenOfficeConnection();

        extracter = new OpenOfficeMetadataExtracter();
        extracter.setMimetypeService(mimetypeMap);
        extracter.setConnection(connection);
        extracter.init();
    }

@ -48,7 +55,7 @@ public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
            return;
        }

        for (String mimetype : UnoMetadataExtracter.SUPPORTED_MIMETYPES)
        for (String mimetype : OpenOfficeMetadataExtracter.SUPPORTED_MIMETYPES)
        {
            double reliability = extracter.getReliability(mimetype);
            assertTrue("Expected above zero reliability", reliability > 0.0);
@ -61,7 +68,7 @@ public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
        {
            return;
        }
        for (String mimetype : UnoMetadataExtracter.SUPPORTED_MIMETYPES)
        for (String mimetype : OpenOfficeMetadataExtracter.SUPPORTED_MIMETYPES)
        {
            testExtractFromMimetype(mimetype);
        }
@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005 Jesper Steen Møller
 * Copyright (C) 2005 Jesper Steen M<EFBFBD>ller
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
@ -31,7 +31,7 @@ import org.pdfbox.pdmodel.PDDocumentInformation;

/**
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class PdfBoxMetadataExtracter extends AbstractMetadataExtracter
{
@ -5,7 +5,7 @@ import org.alfresco.repo.content.MimetypeMap;
/**
 * @see org.alfresco.repo.content.metadata.PdfBoxMetadataExtracter
 *
 * @author Jesper Steen Møller
 * @author Jesper Steen Møller
 */
public class PdfBoxMetadataExtracterTest extends AbstractMetadataExtracterTest
{
@ -123,12 +123,8 @@ public abstract class AbstractContentTransformer implements ContentTransformer
    {
        if (registry == null)
        {
            if (registry == null)
            {
                logger.warn("Property 'registry' has not been set. Ignoring auto-registration: \n" +
                        "   transformer: " + this);
                return;
            }
            logger.warn("Property 'registry' has not been set. Ignoring auto-registration: \n" +
                    "   transformer: " + this);
            return;
        }
        // first register any explicit transformations
@ -50,7 +50,6 @@ public class ContentTransformerRegistry
    private MimetypeMap mimetypeMap;
    /** Cache of previously used transactions */
    private Map<TransformationKey, List<ContentTransformer>> transformationCache;
    private short accessCount;
    /** Controls read access to the transformation cache */
    private Lock transformationCacheReadLock;
    /** controls write access to the transformation cache */
@ -67,7 +66,6 @@ public class ContentTransformerRegistry
        this.transformers = new ArrayList<ContentTransformer>(10);
        transformationCache = new HashMap<TransformationKey, List<ContentTransformer>>(17);

        accessCount = 0;
        // create lock objects for access to the cache
        ReadWriteLock transformationCacheLock = new ReentrantReadWriteLock();
        transformationCacheReadLock = transformationCacheLock.readLock();
@ -120,7 +118,6 @@ public class ContentTransformerRegistry
        try
        {
            transformationCache.clear();
            accessCount = 0;
        }
        finally
        {
@ -243,7 +240,6 @@ public class ContentTransformerRegistry
    private List<ContentTransformer> findDirectTransformers(String sourceMimetype, String targetMimetype)
    {
        double maxReliability = 0.0;
        long leastTime = 100000L;   // 100 seconds - longer than anyone would think of waiting
        List<ContentTransformer> bestTransformers = new ArrayList<ContentTransformer>(2);
        // loop through transformers
        for (ContentTransformer transformer : this.transformers)
@ -289,6 +285,7 @@ public class ContentTransformerRegistry
    /**
     * Recursive method to build up a list of content transformers
     */
    @SuppressWarnings("unused")
    private void buildTransformer(List<ContentTransformer> transformers,
            double reliability,
            List<String> touchedMimetypes,
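The registry guards its transformation cache with a ReentrantReadWriteLock, as the hunks above show: many readers may consult the cache concurrently while a writer takes the exclusive lock to update or clear it. A minimal, self-contained sketch of that locking pattern:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class GuardedCache
{
    private final Map<String, String> cache = new HashMap<String, String>();
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    public String get(String key)
    {
        Lock readLock = lock.readLock();    // shared: many readers at once
        readLock.lock();
        try
        {
            return cache.get(key);
        }
        finally
        {
            readLock.unlock();
        }
    }

    public void put(String key, String value)
    {
        Lock writeLock = lock.writeLock();  // exclusive: blocks readers and writers
        writeLock.lock();
        try
        {
            cache.put(key, value);
        }
        finally
        {
            writeLock.unlock();
        }
    }

    public static void main(String[] args)
    {
        GuardedCache c = new GuardedCache();
        c.put("text/plain->application/pdf", "transformer.OpenOffice");
        System.out.println(c.get("text/plain->application/pdf"));
    }
}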
@ -0,0 +1,254 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.content.transform;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.util.Map;

import net.sf.jooreports.converter.DocumentFamily;
import net.sf.jooreports.converter.DocumentFormat;
import net.sf.jooreports.converter.DocumentFormatRegistry;
import net.sf.jooreports.converter.XmlDocumentFormatRegistry;
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.OpenOfficeException;
import net.sf.jooreports.openoffice.converter.OpenOfficeDocumentConverter;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.io.DefaultResourceLoader;

/**
 * Makes use of the {@link http://sourceforge.net/projects/joott/ JOOConverter} library to
 * perform OpenOffice-driven conversions.
 *
 * @author Derek Hulley
 */
public class OpenOfficeContentTransformer extends AbstractContentTransformer
{
    private static Log logger = LogFactory.getLog(OpenOfficeContentTransformer.class);

    private OpenOfficeConnection connection;
    private boolean connected;
    private OpenOfficeDocumentConverter converter;
    private String documentFormatsConfiguration;
    private DocumentFormatRegistry formatRegistry;

    public OpenOfficeContentTransformer()
    {
        this.connected = false;
    }

    public void setConnection(OpenOfficeConnection connection)
    {
        this.connection = connection;
    }

    /**
     * Set a non-default location from which to load the document format mappings.
     *
     * @param path a resource location supporting the <b>file:</b> or <b>classpath:</b> prefixes
     */
    public void setDocumentFormatsConfiguration(String path)
    {
        this.documentFormatsConfiguration = path;
    }

    public boolean isConnected()
    {
        return connected;
    }

    private synchronized void connect()
    {
        try
        {
            connection.connect();
            connected = true;
        }
        catch (ConnectException e)
        {
            logger.warn(e.getMessage());
            connected = false;
        }
    }

    @Override
    public void register()
    {
        PropertyCheck.mandatory("OpenOfficeContentTransformer", "connection", connection);

        // attempt to establish a connection
        connect();

        // set up the converter
        converter = new OpenOfficeDocumentConverter(connection);

        // load the document conversion configuration
        if (documentFormatsConfiguration != null)
        {
            DefaultResourceLoader resourceLoader = new DefaultResourceLoader();
            try
            {
                InputStream is = resourceLoader.getResource(documentFormatsConfiguration).getInputStream();
                formatRegistry = new XmlDocumentFormatRegistry(is);
            }
            catch (IOException e)
            {
                throw new AlfrescoRuntimeException(
                        "Unable to load document formats configuration file: " + documentFormatsConfiguration);
            }
        }
        else
        {
            formatRegistry = new XmlDocumentFormatRegistry();
        }

        if (connected)
        {
            // register
            super.register();
        }
    }

    /**
     * @see DocumentFormatRegistry
     */
    public double getReliability(String sourceMimetype, String targetMimetype)
    {
        if (!connected)
        {
            return 0.0;
        }

        // there are some conversions that fail, despite the converter believing them possible
        if (targetMimetype.equals(MimetypeMap.MIMETYPE_XHTML))
        {
            return 0.0;
        }
        else if (targetMimetype.equals(MimetypeMap.MIMETYPE_WORDPERFECT))
        {
            return 0.0;
        }

        MimetypeService mimetypeService = getMimetypeService();
        String sourceExtension = mimetypeService.getExtension(sourceMimetype);
        String targetExtension = mimetypeService.getExtension(targetMimetype);
        // query the registry for the source format
        DocumentFormat sourceFormat = formatRegistry.getFormatByFileExtension(sourceExtension);
        if (sourceFormat == null)
        {
            // no document format
            return 0.0;
        }
        // query the registry for the target format
        DocumentFormat targetFormat = formatRegistry.getFormatByFileExtension(targetExtension);
        if (targetFormat == null)
        {
            // no document format
            return 0.0;
        }

        // get the family of the source document
        DocumentFamily sourceFamily = sourceFormat.getFamily();
        // does the format support the conversion
        if (!targetFormat.isExportableFrom(sourceFamily))
        {
            // unable to export from source family of documents to the target format
            return 0.0;
        }
        else
        {
            return 1.0;
        }
    }

    protected void transformInternal(
            ContentReader reader,
            ContentWriter writer,
            Map<String, Object> options) throws Exception
    {
        String sourceMimetype = getMimetype(reader);
        String targetMimetype = getMimetype(writer);

        MimetypeService mimetypeService = getMimetypeService();
        String sourceExtension = mimetypeService.getExtension(sourceMimetype);
        String targetExtension = mimetypeService.getExtension(targetMimetype);
        // query the registry for the source format
        DocumentFormat sourceFormat = formatRegistry.getFormatByFileExtension(sourceExtension);
        if (sourceFormat == null)
        {
            // source format is not recognised
            throw new ContentIOException("No OpenOffice document format for source extension: " + sourceExtension);
        }
        // query the registry for the target format
        DocumentFormat targetFormat = formatRegistry.getFormatByFileExtension(targetExtension);
        if (targetFormat == null)
        {
            // target format is not recognised
            throw new ContentIOException("No OpenOffice document format for target extension: " + targetExtension);
        }
        // get the family of the source document
        DocumentFamily sourceFamily = sourceFormat.getFamily();
        // does the format support the conversion
        if (!targetFormat.isExportableFrom(sourceFamily))
        {
            throw new ContentIOException(
                    "OpenOffice conversion not supported: \n" +
                    "   reader: " + reader + "\n" +
                    "   writer: " + writer);
        }

        // create temporary files to convert from and to
        File tempFromFile = TempFileProvider.createTempFile(
                "OpenOfficeContentTransformer-source-",
                "." + sourceExtension);
        File tempToFile = TempFileProvider.createTempFile(
                "OpenOfficeContentTransformer-target-",
                "." + targetExtension);
        // download the content from the source reader
        reader.getContent(tempFromFile);

        try
        {
            converter.convert(tempFromFile, sourceFormat, tempToFile, targetFormat);
            // conversion success
        }
        catch (OpenOfficeException e)
        {
            throw new ContentIOException("OpenOffice server conversion failed: \n" +
                    "   reader: " + reader + "\n" +
                    "   writer: " + writer + "\n" +
                    "   from file: " + tempFromFile + "\n" +
                    "   to file: " + tempToFile,
                    e);
        }

        // upload the temp output to the writer given us
        writer.putContent(tempToFile);
    }
}
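A minimal sketch of the JOOConverter flow the transformer above wraps: connect to a listening OpenOffice instance over a socket, then convert file to file. The host, port and file names are illustrative, and the two-argument convert overload (which infers formats from the file extensions) is assumed from the published JOOConverter API; the transformer itself resolves formats explicitly through its DocumentFormatRegistry instead.

import java.io.File;
import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;
import net.sf.jooreports.openoffice.converter.OpenOfficeDocumentConverter;

public class OpenOfficeConvertDemo
{
    public static void main(String[] args) throws Exception
    {
        // assumes an OpenOffice instance started with a socket accept string on port 8100
        OpenOfficeConnection connection = new SocketOpenOfficeConnection("localhost", 8100);
        connection.connect();
        try
        {
            OpenOfficeDocumentConverter converter = new OpenOfficeDocumentConverter(connection);
            converter.convert(new File("in.doc"), new File("out.pdf"));
        }
        finally
        {
            connection.disconnect();
        }
    }
}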
@ -16,24 +16,30 @@
 */
package org.alfresco.repo.content.transform;

import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;

import org.alfresco.repo.content.MimetypeMap;

/**
 * @see org.alfresco.repo.content.transform.UnoContentTransformer
 * @see org.alfresco.repo.content.transform.OpenOfficeContentTransformer
 *
 * @author Derek Hulley
 */
public class UnoContentTransformerTest extends AbstractContentTransformerTest
public class OpenOfficeContentTransformerTest extends AbstractContentTransformerTest
{
    private static String MIMETYPE_RUBBISH = "text/rubbish";

    private UnoContentTransformer transformer;
    private OpenOfficeContentTransformer transformer;

    public void onSetUpInTransaction() throws Exception
    {
        transformer = new UnoContentTransformer();
        OpenOfficeConnection connection = (OpenOfficeConnection) applicationContext.getBean("openOfficeConnection");

        transformer = new OpenOfficeContentTransformer();
        transformer.setMimetypeService(mimetypeMap);
        transformer.init();
        transformer.setConnection(connection);
        transformer.setDocumentFormatsConfiguration("classpath:alfresco/mimetype/openoffice-document-formats.xml");
        transformer.register();
    }

    /**
@ -62,6 +68,8 @@ public class UnoContentTransformerTest extends AbstractContentTransformerTest
        assertEquals("Mimetype should not be supported", 0.0, reliability);
        reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MIMETYPE_RUBBISH);
        assertEquals("Mimetype should not be supported", 0.0, reliability);
        reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_XHTML);
        assertEquals("Mimetype should not be supported", 0.0, reliability);
        reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_WORD);
        assertEquals("Mimetype should be supported", 1.0, reliability);
        reliability = transformer.getReliability(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_TEXT_PLAIN);
@ -1,337 +0,0 @@
/*
 * Copyright (C) 2005 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.content.transform;

import java.io.File;
import java.io.IOException;
import java.net.ConnectException;
import java.util.HashMap;
import java.util.Map;

import net.sf.joott.uno.DocumentConverter;
import net.sf.joott.uno.DocumentFormat;
import net.sf.joott.uno.UnoConnection;

import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.TempFileProvider;

/**
 * Makes use of the OpenOffice Uno interfaces to convert the content.
 * <p>
 * The conversions are slow but reliable. Not <b>all</b> possible combinations of transformations
 * have been enabled because they don't necessarily work and need to be specifically tested before
 * being made available generally. As the conversion process is mostly automated, the introduction
 * of faulty transformations can lead to unnecessary bugs. Feel free to experiment and, assuming
 * that the unit test works, report any interesting conversions that can be enabled.
 *
 * @author Derek Hulley
 */
public class UnoContentTransformer extends AbstractContentTransformer
{
    /** map of <tt>DocumentFormat</tt> instances keyed by mimetype conversion */
    private static Map<ContentTransformerRegistry.TransformationKey, DocumentFormatWrapper> formatsByConversion;
    
    static
    {
        // Build the map of known Uno document formats and store by conversion key
        formatsByConversion = new HashMap<ContentTransformerRegistry.TransformationKey, DocumentFormatWrapper>(17);
        
        // Open Office 2.0 / Open Document
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT, MimetypeMap.MIMETYPE_TEXT_PLAIN),
                new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_SPREADSHEET, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_PRESENTATION, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        // Open Office
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER, MimetypeMap.MIMETYPE_TEXT_PLAIN),
                new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_CALC, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_DRAW, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_IMPRESS, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
        // Star Office 5.x
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_DRAW, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_CALC, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_CHART, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_IMPRESS, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_IMPRESS_PACKED, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_WRITER, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_WRITER_GLOBAL, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        // MS Office
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_TEXT_PLAIN),
                new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_EXCEL, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_HTML),
                new DocumentFormatWrapper(DocumentFormat.HTML_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_PPT, MimetypeMap.MIMETYPE_FLASH),
                new DocumentFormatWrapper(DocumentFormat.FLASH_IMPRESS, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_PPT, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
        // Other
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_HTML),
                new DocumentFormatWrapper(DocumentFormat.HTML_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_WORD),
                new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
        formatsByConversion.put(
                new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_HTML, MimetypeMap.MIMETYPE_PDF),
                new DocumentFormatWrapper(DocumentFormat.PDF_WRITER_WEB, 1.0));
        
        // there are many more formats available and therefore many more transformation combinations possible
        //   DocumentFormat.FLASH_IMPRESS
        //   DocumentFormat.HTML_CALC
        //   DocumentFormat.HTML_WRITER
        //   DocumentFormat.MS_EXCEL_97
        //   DocumentFormat.MS_POWERPOINT_97
        //   DocumentFormat.MS_WORD_97
        //   DocumentFormat.PDF_CALC
        //   DocumentFormat.PDF_IMPRESS
        //   DocumentFormat.PDF_WRITER
        //   DocumentFormat.PDF_WRITER_WEB
        //   DocumentFormat.RTF
        //   DocumentFormat.TEXT
        //   DocumentFormat.TEXT_CALC
        //   DocumentFormat.XML_CALC
        //   DocumentFormat.XML_IMPRESS
        //   DocumentFormat.XML_WRITER
        //   DocumentFormat.XML_WRITER_WEB
    }
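
The static initialiser above keys every enabled conversion on a (source mimetype, target mimetype) pair, so a lookup is a single HashMap get. A self-contained sketch of the same pattern follows; the ConversionKey class is a stand-in for ContentTransformerRegistry.TransformationKey, whose real implementation is not shown in this diff:

import java.util.HashMap;
import java.util.Map;

// Stand-in for TransformationKey: a value object whose equals()/hashCode()
// make it usable as a HashMap key.
final class ConversionKey
{
    private final String source;
    private final String target;

    ConversionKey(String source, String target)
    {
        this.source = source;
        this.target = target;
    }

    public boolean equals(Object o)
    {
        if (!(o instanceof ConversionKey)) return false;
        ConversionKey k = (ConversionKey) o;
        return source.equals(k.source) && target.equals(k.target);
    }

    public int hashCode()
    {
        return 31 * source.hashCode() + target.hashCode();
    }
}

class ConversionLookupDemo
{
    public static void main(String[] args)
    {
        Map<ConversionKey, Double> reliability = new HashMap<ConversionKey, Double>();
        reliability.put(new ConversionKey("application/msword", "application/pdf"), 1.0);
        // an unknown pair simply returns null, which getReliability() below maps to 0.0
        System.out.println(reliability.get(new ConversionKey("application/msword", "application/pdf"))); // 1.0
        System.out.println(reliability.get(new ConversionKey("text/rubbish", "application/pdf")));       // null
    }
}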
    
    private String connectionUrl = UnoConnection.DEFAULT_CONNECTION_STRING;
    private UnoConnection connection;
    private boolean isConnected;
    
    /**
     * Constructs the default transformer that will attempt to connect to the
     * Uno server using the default connect string.
     *
     * @see UnoConnection#DEFAULT_CONNECTION_STRING
     */
    public UnoContentTransformer()
    {
        isConnected = false;
    }
    
    /**
     * Override the default connection URL with a new one.
     *
     * @param connectionUrl the connection string
     *
     * @see UnoConnection#DEFAULT_CONNECTION_STRING
     */
    public void setConnectionUrl(String connectionUrl)
    {
        this.connectionUrl = connectionUrl;
    }
    
    /**
     * Connects to the OpenOffice server. If successful, then
     * {@link AbstractContentTransformer#register() auto-registers}.
     */
    public synchronized void init()
    {
        connection = new UnoConnection(connectionUrl);
        // attempt to make a connection
        try
        {
            connection.connect();
            isConnected = true;
            // register
            super.register();
        }
        catch (ConnectException e)
        {
            isConnected = false;
        }
    }
    
    /**
     * @return Returns true if a connection to the Uno server could be established
     */
    public boolean isConnected()
    {
        return isConnected;
    }
    
    /**
     * @param sourceMimetype
     * @param targetMimetype
     * @return Returns a document format wrapper that is valid for the given source and target mimetypes
     */
    private static DocumentFormatWrapper getDocumentFormatWrapper(String sourceMimetype, String targetMimetype)
    {
        // get the well-known document format for the specific conversion
        ContentTransformerRegistry.TransformationKey key =
                new ContentTransformerRegistry.TransformationKey(sourceMimetype, targetMimetype);
        DocumentFormatWrapper wrapper = UnoContentTransformer.formatsByConversion.get(key);
        return wrapper;
    }
    
    /**
     * Checks how reliable the conversion will be when performed by the Uno server.
     * <p>
     * The connection for the Uno server is checked in order to have any chance of
     * being reliable.
     * <p>
     * The conversions' reliabilities are set up statically based on prior tests that
     * included checking performance as well as accuracy.
     */
    public double getReliability(String sourceMimetype, String targetMimetype)
    {
        // check if a connection to the Uno server can be established
        if (!isConnected())
        {
            // no connection means that conversion is not possible
            return 0.0;
        }
        // check if the source and target mimetypes are supported
        DocumentFormatWrapper docFormatWrapper = getDocumentFormatWrapper(sourceMimetype, targetMimetype);
        if (docFormatWrapper == null)
        {
            return 0.0;
        }
        else
        {
            return docFormatWrapper.getReliability();
        }
    }
    
    public void transformInternal(ContentReader reader, ContentWriter writer, Map<String, Object> options)
            throws Exception
    {
        String sourceMimetype = getMimetype(reader);
        String targetMimetype = getMimetype(writer);
        
        // create temporary files to convert from and to
        File tempFromFile = TempFileProvider.createTempFile(
                "UnoContentTransformer",
                "." + getMimetypeService().getExtension(sourceMimetype));
        File tempToFile = TempFileProvider.createTempFile(
                "UnoContentTransformer",
                "." + getMimetypeService().getExtension(targetMimetype));
        // download the content from the source reader
        reader.getContent(tempFromFile);
        
        // get the document format that should be used
        DocumentFormatWrapper docFormatWrapper = getDocumentFormatWrapper(sourceMimetype, targetMimetype);
        try
        {
            docFormatWrapper.execute(tempFromFile, tempToFile, connection);
            // conversion success
        }
        catch (ConnectException e)
        {
            throw new ContentIOException("Connection to Uno server failed: \n" +
                    "   reader: " + reader + "\n" +
                    "   writer: " + writer,
                    e);
        }
        catch (IOException e)
        {
            throw new ContentIOException("Uno server conversion failed: \n" +
                    "   reader: " + reader + "\n" +
                    "   writer: " + writer + "\n" +
                    "   from file: " + tempFromFile + "\n" +
                    "   to file: " + tempToFile,
                    e);
        }
        
        // upload the temp output to the writer given to us
        writer.putContent(tempToFile);
    }
    
    /**
     * Wraps a document format as well as the reliability. The source and target mimetypes
     * are not kept, but will probably be closely associated with the reliability.
     */
    private static class DocumentFormatWrapper
    {
        /*
         * Source and target mimetypes not kept -> class is private as it doesn't keep
         * enough info to be used safely externally
         */
        
        private DocumentFormat documentFormat;
        private double reliability;
        
        public DocumentFormatWrapper(DocumentFormat documentFormat, double reliability)
        {
            this.documentFormat = documentFormat;
            this.reliability = reliability;
        }
        
        public double getReliability()
        {
            return reliability;
        }
        
        /**
         * Executes the transformation
         */
        public void execute(File fromFile, File toFile, UnoConnection connection) throws ConnectException, IOException
        {
            DocumentConverter converter = new DocumentConverter(connection);
            converter.convert(fromFile, toFile, documentFormat);
        }
    }
}
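
For comparison with the replacement code at the top of this change: the deleted class drove the conversion through joott's DocumentConverter with a single target DocumentFormat per mimetype pair, whereas the new OpenOfficeContentTransformer passes explicit source and target formats to the jooreports converter. Both calls, exactly as they appear in this diff:

// old (joott), from DocumentFormatWrapper.execute(...) above:
DocumentConverter converter = new DocumentConverter(connection);
converter.convert(fromFile, toFile, documentFormat);

// new (jooreports), from OpenOfficeContentTransformer.transformInternal(...):
converter.convert(tempFromFile, sourceFormat, tempToFile, targetFormat);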
@ -47,6 +47,10 @@ import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
@ -73,6 +77,12 @@ public class CopyServiceImpl implements CopyService
    
    /** Rule service */
    private RuleService ruleService;
    
    /** Permission service */
    private PermissionService permissionService;
    
    /** Authentication service */
    private AuthenticationService authenticationService;
    
    /** Policy delegates */
    private ClassPolicyDelegate<CopyServicePolicies.OnCopyNodePolicy> onCopyNodeDelegate;
@ -128,6 +138,26 @@ public class CopyServiceImpl implements CopyService
        this.ruleService = ruleService;
    }
    
    /**
     * Set the permission service
     *
     * @param permissionService the permission service
     */
    public void setPermissionService(PermissionService permissionService)
    {
        this.permissionService = permissionService;
    }
    
    /**
     * Sets the authentication service
     *
     * @param authenticationService the authentication service
     */
    public void setAuthenticationService(AuthenticationService authenticationService)
    {
        this.authenticationService = authenticationService;
    }
    
    /**
     * Initialise method
     */
@ -395,6 +425,9 @@ public class CopyServiceImpl implements CopyService
            
            // Copy the associations
            copyAssociations(destinationNodeRef, copyDetails, copyChildren, copiedChildren);
            
            // Copy permissions
            copyPermissions(sourceNodeRef, destinationNodeRef);
        }
        finally
        {
@ -404,6 +437,34 @@ public class CopyServiceImpl implements CopyService
        return destinationNodeRef;
    }
    
    /**
     * Copies the permissions of the source node reference onto the destination node reference
     *
     * @param sourceNodeRef         the source node reference
     * @param destinationNodeRef    the destination node reference
     */
    private void copyPermissions(NodeRef sourceNodeRef, NodeRef destinationNodeRef)
    {
        // Get the permission details of the source node reference
        Set<AccessPermission> permissions = this.permissionService.getAllSetPermissions(sourceNodeRef);
        boolean includeInherited = this.permissionService.getInheritParentPermissions(sourceNodeRef);
        
        AccessStatus writePermission = permissionService.hasPermission(destinationNodeRef, PermissionService.CHANGE_PERMISSIONS);
        if (this.authenticationService.isCurrentUserTheSystemUser() || writePermission.equals(AccessStatus.ALLOWED))
        {
            // Set the permission values on the destination node
            for (AccessPermission permission : permissions)
            {
                this.permissionService.setPermission(
                        destinationNodeRef,
                        permission.getAuthority(),
                        permission.getPermission(),
                        permission.getAccessStatus().equals(AccessStatus.ALLOWED));
            }
            this.permissionService.setInheritParentPermissions(destinationNodeRef, includeInherited);
        }
    }
    
    /**
     * Gets the copy details. This calls the appropriate policies that have been registered
     * against the node and aspect types in order to pick up any type specific copy behaviour.
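
The permission copy above runs only when the current user is the system user or holds CHANGE_PERMISSIONS on the destination node; otherwise it is silently skipped. A compile-oriented sketch of that guard in isolation, using only the service calls visible in copyPermissions(...); the class and method names here are illustrative:

import java.util.Set;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.PermissionService;

// Illustrative helper: replays the source node's explicit permissions onto the
// destination, as copyPermissions(...) above does, written as a free-standing
// method so the guard condition is easy to see.
class PermissionCopySketch
{
    static void replayPermissions(PermissionService permissionService,
            AuthenticationService authenticationService,
            NodeRef source, NodeRef destination)
    {
        Set<AccessPermission> permissions = permissionService.getAllSetPermissions(source);
        boolean inherit = permissionService.getInheritParentPermissions(source);
        
        AccessStatus canChange = permissionService.hasPermission(destination, PermissionService.CHANGE_PERMISSIONS);
        if (!authenticationService.isCurrentUserTheSystemUser() && !canChange.equals(AccessStatus.ALLOWED))
        {
            return;   // silently skip, as the service implementation does
        }
        for (AccessPermission permission : permissions)
        {
            permissionService.setPermission(
                    destination,
                    permission.getAuthority(),
                    permission.getPermission(),
                    permission.getAccessStatus().equals(AccessStatus.ALLOWED));
        }
        permissionService.setInheritParentPermissions(destination, inherit);
    }
}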
@ -21,6 +21,7 @@ import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -53,6 +54,8 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.NativeArray;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Wrapper;
import org.springframework.util.StringUtils;
@ -847,56 +850,80 @@ public final class Node implements Serializable
        for (String key : this.properties.keySet())
        {
            Serializable value = (Serializable)this.properties.get(key);
            if (value instanceof Node)
            {
                // convert back to NodeRef
                value = ((Node)value).getNodeRef();
            }
            else if (value instanceof ScriptContentData)
            {
                // convert back to ContentData
                value = ((ScriptContentData)value).contentData;
            }
            else if (value instanceof Wrapper)
            {
                // unwrap a Java object from a JavaScript wrapper
                value = (Serializable)((Wrapper)value).unwrap();
            }
            else if (value instanceof ScriptableObject)
            {
                // a scriptable object will probably indicate a multi-value property
                // set using a JavaScript Array object
                ScriptableObject values = (ScriptableObject)value;
                
                // convert JavaScript array of values to a List of Serializable objects
                Object[] propIds = values.getIds();
                List<Serializable> propValues = new ArrayList<Serializable>(propIds.length);
                for (int i=0; i<propIds.length; i++)
                {
                    // work on each key in turn
                    Object propId = propIds[i];
                    
                    // we are only interested in keys that indicate a list of values
                    if (propId instanceof Integer)
                    {
                        // get the value out for the specified key - make sure it is Serializable
                        Object val = values.get((Integer)propId, values);
                        if (val instanceof Wrapper)
                        {
                            val = ((Wrapper)val).unwrap();
                        }
                        if (val instanceof Serializable)
                        {
                            propValues.add((Serializable)val);
                        }
                    }
                }
                value = (Serializable)propValues;
            }
            
            // perform the conversion from script wrapper object to repo serializable values
            value = convertValue(value);
            
            props.put(createQName(key), value);
        }
        this.nodeService.setProperties(this.nodeRef, props);
    }
    
    /**
     * Convert an object from any script wrapper value to a valid repository serializable value.
     * This includes converting JavaScript Array objects to Lists of valid objects.
     *
     * @param value     Value to convert from script wrapper object to repo serializable value
     *
     * @return valid repo value
     */
    private static Serializable convertValue(Serializable value)
    {
        if (value instanceof Node)
        {
            // convert back to NodeRef
            value = ((Node)value).getNodeRef();
        }
        else if (value instanceof ScriptContentData)
        {
            // convert back to ContentData
            value = ((ScriptContentData)value).contentData;
        }
        else if (value instanceof Wrapper)
        {
            // unwrap a Java object from a JavaScript wrapper
            // recursively call this method to convert the unwrapped value
            value = convertValue((Serializable)((Wrapper)value).unwrap());
        }
        else if (value instanceof ScriptableObject)
        {
            // a scriptable object will probably indicate a multi-value property
            // set using a JavaScript Array object
            ScriptableObject values = (ScriptableObject)value;
            
            if (value instanceof NativeArray)
            {
                // convert JavaScript array of values to a List of Serializable objects
                Object[] propIds = values.getIds();
                List<Serializable> propValues = new ArrayList<Serializable>(propIds.length);
                for (int i=0; i<propIds.length; i++)
                {
                    // work on each key in turn
                    Object propId = propIds[i];
                    
                    // we are only interested in keys that indicate a list of values
                    if (propId instanceof Integer)
                    {
                        // get the value out for the specified key
                        Serializable val = (Serializable)values.get((Integer)propId, values);
                        // recursively call this method to convert the value
                        propValues.add(convertValue(val));
                    }
                }
                value = (Serializable)propValues;
            }
            else
            {
                // TODO: add code here to use the dictionary and convert to correct value type
                Object javaObj = Context.jsToJava(value, Date.class);
                if (javaObj instanceof Date)
                {
                    value = (Date)javaObj;
                }
            }
        }
        return value;
    }
    
    /**
     * Create a new File (cm:content) node as a child of this node.
@ -1166,14 +1193,8 @@ public final class Node implements Serializable
                {
                    // get the value out for the specified key - make sure it is Serializable
                    Object value = props.get((String)propId, props);
                    if (value instanceof Wrapper)
                    {
                        value = ((Wrapper)value).unwrap();
                    }
                    if (value instanceof Serializable)
                    {
                        aspectProps.put(createQName((String)propId), (Serializable)value);
                    }
                    value = convertValue((Serializable)value);
                    aspectProps.put(createQName((String)propId), (Serializable)value);
                }
            }
        }
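
convertValue(...) recurses through Rhino's wrapper types so that JavaScript arrays become Lists of plain Serializable values. A runnable sketch of the same unwrapping against Rhino directly; only Rhino on the classpath is assumed, and the script source is made up:

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.mozilla.javascript.Context;
import org.mozilla.javascript.NativeArray;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Wrapper;

// Demonstrates the NativeArray -> List conversion used by convertValue(...)
public class RhinoArrayDemo
{
    public static void main(String[] args)
    {
        Context cx = Context.enter();
        try
        {
            ScriptableObject scope = cx.initStandardObjects();
            Object result = cx.evaluateString(scope, "['a', 'b', 'c']", "demo", 1, null);
            
            if (result instanceof NativeArray)
            {
                ScriptableObject values = (ScriptableObject) result;
                Object[] propIds = values.getIds();
                List<Serializable> converted = new ArrayList<Serializable>(propIds.length);
                for (Object propId : propIds)
                {
                    // integer keys denote array slots, mirroring the check above
                    if (propId instanceof Integer)
                    {
                        Object val = values.get((Integer) propId, values);
                        if (val instanceof Wrapper)
                        {
                            val = ((Wrapper) val).unwrap();
                        }
                        converted.add((Serializable) val);
                    }
                }
                System.out.println(converted);   // [a, b, c]
            }
        }
        finally
        {
            Context.exit();
        }
    }
}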
@ -467,21 +467,34 @@ public class FileFolderServiceImpl implements FileFolderService
            targetParentRef = assocRef.getParentRef();
        }
        
        // there is nothing to do if both the name and parent folder haven't changed
        if (targetParentRef.equals(assocRef.getParentRef()) && newName.equals(beforeFileInfo.getName()))
        boolean checkExists = true;
        if (targetParentRef.equals(assocRef.getParentRef()))
        {
            if (logger.isDebugEnabled())
            // there is nothing to do if both the name and parent folder haven't changed
            if (newName.equals(beforeFileInfo.getName()))
            {
                logger.debug("Doing nothing - neither filename or parent has not changed: \n" +
                        "   parent: " + targetParentRef + "\n" +
                        "   before: " + beforeFileInfo + "\n" +
                        "   new name: " + newName);
                if (logger.isDebugEnabled())
                {
                    logger.debug("Doing nothing - neither the filename nor the parent has changed: \n" +
                            "   parent: " + targetParentRef + "\n" +
                            "   before: " + beforeFileInfo + "\n" +
                            "   new name: " + newName);
                }
                return beforeFileInfo;
            }
            else if (newName.equalsIgnoreCase(beforeFileInfo.getName()))
            {
                // name has only changed case so don't bother with exists check
                checkExists = false;
            }
            return beforeFileInfo;
        }
        
        // check for existing file or folder
        checkExists(targetParentRef, newName);
        // check for existing file or folder (if name has changed)
        if (checkExists)
        {
            checkExists(targetParentRef, newName);
        }
        
        QName qname = QName.createQName(
                NamespaceService.CONTENT_MODEL_1_0_URI,
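
The rename logic above now distinguishes three cases: nothing changed, only the letter case changed (the duplicate-name check is skipped because the only "existing" match would be the node itself), and a genuine rename. The decision reduces to this sketch, with plain strings standing in for the file info:

// Illustrative decision table for rename(), using bare strings
static boolean needsExistsCheck(String before, String after)
{
    if (after.equals(before))
    {
        return false;   // nothing to do at all
    }
    if (after.equalsIgnoreCase(before))
    {
        return false;   // case-only rename: the "existing" match is the node itself
    }
    return true;        // genuinely new name: check for a clash first
}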
@ -23,6 +23,7 @@ import java.util.Set;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
@ -279,5 +280,11 @@ public abstract class LuceneBase2
    {
        return deltaId;
    }
    
    public <R> R doWithWriteLock(LockWork<R> lockWork)
    {
        return indexInfo.doWithWriteLock(lockWork);
    }
    
}
@ -18,6 +18,8 @@ package org.alfresco.repo.search.impl.lucene;

import java.io.File;

import junit.framework.TestCase;

import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeService;
@ -26,8 +28,6 @@ import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;

import junit.framework.TestCase;

/**
 * @see org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent
 *
@ -47,7 +47,7 @@ public class LuceneIndexBackupComponentTest extends TestCase
    {
        TransactionService transactionService = (TransactionService) ctx.getBean("transactionComponent");
        NodeService nodeService = (NodeService) ctx.getBean("NodeService");
        LuceneIndexerAndSearcherFactory factory = (LuceneIndexerAndSearcherFactory) ctx.getBean("luceneIndexerAndSearcherFactory");
        LuceneIndexerAndSearcher factory = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
        
        this.authenticationComponent = (AuthenticationComponent)ctx.getBean("authenticationComponent");
        this.authenticationComponent.setSystemUserAsCurrentUser();
@ -18,10 +18,9 @@ package org.alfresco.repo.search.impl.lucene;

import java.util.Set;

import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
@ -44,4 +43,6 @@ public interface LuceneIndexer2 extends IndexerSPI
    public void flushPending() throws LuceneIndexException;
    public Set<NodeRef> getDeletions();
    public boolean getDeleteOnlyNodes();
    
    public <R> R doWithWriteLock(IndexInfo.LockWork<R> lockWork);
}
@ -24,4 +24,12 @@ public interface LuceneIndexerAndSearcher extends IndexerAndSearcher, LuceneConf
    public int prepare() throws IndexerException;
    public void commit() throws IndexerException;
    public void rollback();
    
    public interface WithAllWriteLocksWork<Result>
    {
        public Result doWork() throws Exception;
    }
    
    public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork);
}
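
WithAllWriteLocksWork is a plain callback: an implementation's doWork() runs while the factory holds the write lock of every store. A hypothetical caller would look like this (indexerAndSearcher and performMaintenance() are made-up names):

// Hypothetical usage of the new callback contract
Integer touched = indexerAndSearcher.doWithAllWriteLocks(
        new LuceneIndexerAndSearcher.WithAllWriteLocksWork<Integer>()
        {
            public Integer doWork() throws Exception
            {
                // all index write locks are held for the duration of this method
                return performMaintenance();
            }
        });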
@ -33,6 +33,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher.WithAllWriteLocksWork;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.search.transaction.SimpleTransaction;
@ -61,11 +62,9 @@ import org.quartz.JobExecutionException;
/**
 * This class is the resource manager for LuceneIndexers and LuceneSearchers.
 *
 * It supports two phase commit inside XA transactions and outside transactions
 * it provides thread local transaction support.
 * It supports two phase commit inside XA transactions; outside transactions it provides thread local transaction support.
 *
 * TODO: Provide pluggable support for a transaction manager TODO: Integrate
 * with Spring transactions
 * TODO: Provide pluggable support for a transaction manager TODO: Integrate with Spring transactions
 *
 * @author andyh
 *
@ -73,6 +72,8 @@ import org.quartz.JobExecutionException;

public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher, XAResource
{
    private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
    
    private DictionaryService dictionaryService;
    
    private NamespaceService nameSpaceService;
@ -90,9 +91,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
    private String lockDirectory;
    
    /**
     * A map of active global transactions. It contains all the indexers a
     * transaction has used, with at most one indexer for each store within a
     * transaction
     * A map of active global transactions. It contains all the indexers a transaction has used, with at most one indexer for each store within a transaction
     */
    
    private static Map<Xid, Map<StoreRef, LuceneIndexer>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer>>();
@ -191,20 +190,18 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
    }
    
    /**
     * Set the maximum average transformation time allowed to a transformer in order to have
     * the transformation performed in the current transaction. The default is 20ms.
     * Set the maximum average transformation time allowed to a transformer in order to have the transformation performed in the current transaction. The default is 20ms.
     *
     * @param maxAtomicTransformationTime the maximum average time that a text transformation may
     *      take in order to be performed atomically.
     * @param maxAtomicTransformationTime
     *            the maximum average time that a text transformation may take in order to be performed atomically.
     */
    public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
    {
        this.maxAtomicTransformationTime = maxAtomicTransformationTime;
    }
    
    /**
     * Check if we are in a global transaction according to the transaction
     * manager
     * Check if we are in a global transaction according to the transaction manager
     *
     * @return
     */
@ -240,8 +237,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
    }
    
    /**
     * Get an indexer for the store to use in the current transaction for this
     * thread of control.
     * Get an indexer for the store to use in the current transaction for this thread of control.
     *
     * @param storeRef -
     *            the id of the store
@ -454,7 +450,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
                }
                return;
            }
        } finally
        }
        finally
        {
            activeIndexersInGlobalTx.remove(xid);
        }
@ -587,7 +584,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
            {
                indexer.rollback();
            }
        } finally
        }
        finally
        {
            activeIndexersInGlobalTx.remove(xid);
        }
@ -677,7 +675,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
                    }
                }
            }
        } finally
        }
        finally
        {
            if (threadLocalIndexers.get() != null)
            {
@ -875,17 +874,17 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
    {
        IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
    }
    
    public void setCommitLockTimeout(long timeout)
    {
        IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
    }
    
    public void setLockPollInterval(long time)
    {
        Lock.LOCK_POLL_INTERVAL = time;
    }
    
    public int getIndexerMaxFieldLength()
    {
        return indexerMaxFieldLength;
@ -896,25 +895,24 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        this.indexerMaxFieldLength = indexerMaxFieldLength;
        System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
    }
    
    /**
     * This component is able to <i>safely</i> perform backups of the Lucene indexes while
     * the server is running.
     * This component is able to <i>safely</i> perform backups of the Lucene indexes while the server is running.
     * <p>
     * It can be run directly by calling the {@link #backup() } method, but the convenience
     * {@link LuceneIndexBackupJob} can be used to call it as well.
     * It can be run directly by calling the {@link #backup() } method, but the convenience {@link LuceneIndexBackupJob} can be used to call it as well.
     *
     * @author Derek Hulley
     */
    public static class LuceneIndexBackupComponent
    {
        private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
        
        private TransactionService transactionService;
        private LuceneIndexerAndSearcherFactory factory;
        
        private LuceneIndexerAndSearcher factory;
        
        private NodeService nodeService;
        
        private String targetLocation;
        
        public LuceneIndexBackupComponent()
        {
        }
@ -932,9 +930,10 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        /**
         * Set the Lucene index factory that will be used to control the index locks
         *
         * @param factory the index factory
         * @param factory
         *            the index factory
         */
        public void setFactory(LuceneIndexerAndSearcherFactory factory)
        public void setFactory(LuceneIndexerAndSearcher factory)
        {
            this.factory = factory;
        }
@ -942,7 +941,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        /**
         * Used to retrieve the stores
         *
         * @param nodeService the node service
         * @param nodeService
         *            the node service
         */
        public void setNodeService(NodeService nodeService)
        {
@ -952,13 +952,14 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        /**
         * Set the directory to which the backup will be copied
         *
         * @param targetLocation the backup directory
         * @param targetLocation
         *            the backup directory
         */
        public void setTargetLocation(String targetLocation)
        {
            this.targetLocation = targetLocation;
        }
        
        /**
         * Backup the Lucene indexes
         */
@ -978,64 +979,48 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
        private void backupImpl()
        {
            // create the location to copy to
            File targetDir = new File(targetLocation);
            final File targetDir = new File(targetLocation);
            if (targetDir.exists() && !targetDir.isDirectory())
            {
                throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
            }
            File targetParentDir = targetDir.getParentFile();
            final File targetParentDir = targetDir.getParentFile();
            if (targetParentDir == null)
            {
                throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
            }
            File tempDir = new File(targetParentDir, "indexbackup_temp");
            final File tempDir = new File(targetParentDir, "indexbackup_temp");
            
            // get all the available stores
            List<StoreRef> storeRefs = nodeService.getStores();
            
            // lock all the stores
            List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
            try
            factory.doWithAllWriteLocks(new WithAllWriteLocksWork<Object>()
            {
                for (StoreRef storeRef : storeRefs)
                {
                    factory.luceneIndexLock.getWriteLock(storeRef);
                    lockedStores.add(storeRef);
                }
                File indexRootDir = new File(factory.indexRootLocation);
                // perform the copy
                backupDirectory(indexRootDir, tempDir, targetDir);
            }
            catch (Throwable e)
            {
                throw new AlfrescoRuntimeException("Failed to copy Lucene index root: \n" +
                        "   Index root: " + factory.indexRootLocation + "\n" +
                        "   Target: " + targetDir,
                        e);
            }
            finally
            {
                for (StoreRef storeRef : lockedStores)
                public Object doWork()
                {
                    try
                    {
                        factory.luceneIndexLock.releaseWriteLock(storeRef);
                        File indexRootDir = new File(factory.getIndexRootLocation());
                        // perform the copy
                        backupDirectory(indexRootDir, tempDir, targetDir);
                        return null;
                    }
                    catch (Throwable e)
                    {
                        logger.error("Failed to release index lock for store " + storeRef, e);
                        throw new AlfrescoRuntimeException(
                                "Failed to copy Lucene index root: \n"
                                + "   Index root: " + factory.getIndexRootLocation() + "\n" + "   Target: "
                                + targetDir, e);
                    }
                }
            });
            
            if (logger.isDebugEnabled())
            {
                logger.debug("Backed up Lucene indexes: \n" +
                        "   Target directory: " + targetDir);
                logger.debug("Backed up Lucene indexes: \n" + "   Target directory: " + targetDir);
            }
        }
        
        /**
         * Makes a backup of the source directory via a temporary folder
         */
        private void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception
@ -1086,14 +1071,15 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
    {
        /** KEY_LUCENE_INDEX_BACKUP_COMPONENT = 'luceneIndexBackupComponent' */
        public static final String KEY_LUCENE_INDEX_BACKUP_COMPONENT = "luceneIndexBackupComponent";
        
        /**
         * Locks the Lucene indexes and copies them to a backup location
         */
        public void execute(JobExecutionContext context) throws JobExecutionException
        {
            JobDataMap jobData = context.getJobDetail().getJobDataMap();
            LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
            LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData
                    .get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
            if (backupComponent == null)
            {
                throw new JobExecutionException("Missing job data: " + KEY_LUCENE_INDEX_BACKUP_COMPONENT);
@ -1102,4 +1088,53 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
            backupComponent.backup();
        }
    }
    
    public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
    {
        // get all the available stores
        List<StoreRef> storeRefs = nodeService.getStores();
        
        // lock all the stores
        List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
        try
        {
            for (StoreRef storeRef : storeRefs)
            {
                luceneIndexLock.getWriteLock(storeRef);
                lockedStores.add(storeRef);
            }
            
            try
            {
                return lockWork.doWork();
            }
            catch (Throwable exception)
            {
                // Re-throw the exception
                if (exception instanceof RuntimeException)
                {
                    throw (RuntimeException) exception;
                }
                else
                {
                    throw new RuntimeException("Error during run with lock.", exception);
                }
            }
        }
        finally
        {
            for (StoreRef storeRef : lockedStores)
            {
                try
                {
                    luceneIndexLock.releaseWriteLock(storeRef);
                }
                catch (Throwable e)
                {
                    logger.error("Failed to release index lock for store " + storeRef, e);
                }
            }
        }
    }
}
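
The method above follows the standard acquire-all/release-acquired-only shape: take each lock in turn, remember exactly which ones were taken, and release those in a finally block so a failure halfway through acquisition cannot leak locks. The same shape expressed with java.util.concurrent locks, for comparison:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.Lock;

// Same acquire-all / release-acquired-only shape as doWithAllWriteLocks above
class AllLocksSketch
{
    static <R> R withAllLocks(List<Lock> locks, Callable<R> work) throws Exception
    {
        List<Lock> taken = new ArrayList<Lock>(locks.size());
        try
        {
            for (Lock lock : locks)
            {
                lock.lock();
                taken.add(lock);
            }
            return work.call();
        }
        finally
        {
            for (Lock lock : taken)
            {
                try
                {
                    lock.unlock();
                }
                catch (RuntimeException e)
                {
                    // keep releasing the rest, mirroring the logger.error(...) handling above
                }
            }
        }
    }
}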
@ -17,7 +17,6 @@
package org.alfresco.repo.search.impl.lucene;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -34,6 +33,7 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.search.transaction.SimpleTransaction;
import org.alfresco.repo.search.transaction.SimpleTransactionManager;
@ -61,11 +61,9 @@ import org.quartz.JobExecutionException;
/**
 * This class is the resource manager for LuceneIndexers and LuceneSearchers.
 *
 * It supports two phase commit inside XA transactions and outside transactions
 * it provides thread local transaction support.
 * It supports two phase commit inside XA transactions; outside transactions it provides thread local transaction support.
 *
 * TODO: Provide pluggable support for a transaction manager TODO: Integrate
 * with Spring transactions
 * TODO: Provide pluggable support for a transaction manager TODO: Integrate with Spring transactions
 *
 * @author andyh
 *
@ -73,6 +71,8 @@ import org.quartz.JobExecutionException;

public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearcher, XAResource
{
    private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory2.class);
    
    private DictionaryService dictionaryService;
    
    private NamespaceService nameSpaceService;
@ -90,9 +90,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    private String lockDirectory;
    
    /**
     * A map of active global transactions. It contains all the indexers a
     * transaction has used, with at most one indexer for each store within a
     * transaction
     * A map of active global transactions. It contains all the indexers a transaction has used, with at most one indexer for each store within a transaction
     */
    
    private static Map<Xid, Map<StoreRef, LuceneIndexer2>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer2>>();
@ -191,20 +189,18 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    }
    
    /**
     * Set the maximum average transformation time allowed to a transformer in order to have
     * the transformation performed in the current transaction. The default is 20ms.
     * Set the maximum average transformation time allowed to a transformer in order to have the transformation performed in the current transaction. The default is 20ms.
     *
     * @param maxAtomicTransformationTime the maximum average time that a text transformation may
     *      take in order to be performed atomically.
     * @param maxAtomicTransformationTime
     *            the maximum average time that a text transformation may take in order to be performed atomically.
     */
    public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
    {
        this.maxAtomicTransformationTime = maxAtomicTransformationTime;
    }
    
    /**
     * Check if we are in a global transaction according to the transaction
     * manager
     * Check if we are in a global transaction according to the transaction manager
     *
     * @return
     */
@ -240,8 +236,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    }
    
    /**
     * Get an indexer for the store to use in the current transaction for this
     * thread of control.
     * Get an indexer for the store to use in the current transaction for this thread of control.
     *
     * @param storeRef -
     *            the id of the store
@ -357,7 +352,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        LuceneIndexerImpl2 indexer = LuceneIndexerImpl2.getUpdateIndexer(storeRef, deltaId, this);
        indexer.setNodeService(nodeService);
        indexer.setDictionaryService(dictionaryService);
        //indexer.setLuceneIndexLock(luceneIndexLock);
        // indexer.setLuceneIndexLock(luceneIndexLock);
        indexer.setLuceneFullTextSearchIndexer(luceneFullTextSearchIndexer);
        indexer.setContentService(contentService);
        indexer.setMaxAtomicTransformationTime(maxAtomicTransformationTime);
@ -395,7 +390,7 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    {
        LuceneSearcherImpl2 searcher = LuceneSearcherImpl2.getSearcher(storeRef, indexer, this);
        searcher.setNamespacePrefixResolver(nameSpaceService);
        //searcher.setLuceneIndexLock(luceneIndexLock);
        // searcher.setLuceneIndexLock(luceneIndexLock);
        searcher.setNodeService(nodeService);
        searcher.setDictionaryService(dictionaryService);
        searcher.setQueryRegister(queryRegister);
@ -454,7 +449,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
                }
                return;
            }
        } finally
        }
        finally
        {
            activeIndexersInGlobalTx.remove(xid);
        }
@ -587,7 +583,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
            {
                indexer.rollback();
            }
        } finally
        }
        finally
        {
            activeIndexersInGlobalTx.remove(xid);
        }
@ -677,7 +674,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
            }
            }
        }
        } finally
        }
        finally
        {
            if (threadLocalIndexers.get() != null)
            {
@ -875,17 +873,17 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    {
        IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
    }
    
    public void setCommitLockTimeout(long timeout)
    {
        IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
    }
    
    public void setLockPollInterval(long time)
    {
        Lock.LOCK_POLL_INTERVAL = time;
    }
    
    public int getIndexerMaxFieldLength()
    {
        return indexerMaxFieldLength;
@ -896,25 +894,25 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        this.indexerMaxFieldLength = indexerMaxFieldLength;
        System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
    }
    
    /**
     * This component is able to <i>safely</i> perform backups of the Lucene indexes while
     * the server is running.
     * This component is able to <i>safely</i> perform backups of the Lucene indexes while the server is running.
     * <p>
     * It can be run directly by calling the {@link #backup() } method, but the convenience
     * {@link LuceneIndexBackupJob} can be used to call it as well.
     * It can be run directly by calling the {@link #backup() } method, but the convenience {@link LuceneIndexBackupJob} can be used to call it as well.
     *
     * @author Derek Hulley
     */
    public static class LuceneIndexBackupComponent
    {
        private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory2.class);
        
        private TransactionService transactionService;
        private LuceneIndexerAndSearcherFactory2 factory;
        
        private LuceneIndexerAndSearcher factory;
        
        private NodeService nodeService;
        
        private String targetLocation;
        
        public LuceneIndexBackupComponent()
        {
        }
@ -932,9 +930,10 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        /**
         * Set the Lucene index factory that will be used to control the index locks
         *
         * @param factory the index factory
         * @param factory
         *            the index factory
         */
        public void setFactory(LuceneIndexerAndSearcherFactory2 factory)
        public void setFactory(LuceneIndexerAndSearcher factory)
        {
            this.factory = factory;
        }
@ -942,7 +941,8 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        /**
         * Used to retrieve the stores
         *
         * @param nodeService the node service
         * @param nodeService
         *            the node service
         */
        public void setNodeService(NodeService nodeService)
        {
@ -952,13 +952,14 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        /**
         * Set the directory to which the backup will be copied
         *
         * @param targetLocation the backup directory
         * @param targetLocation
         *            the backup directory
         */
        public void setTargetLocation(String targetLocation)
        {
            this.targetLocation = targetLocation;
        }
        
        /**
         * Backup the Lucene indexes
         */
@ -978,62 +979,45 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
        private void backupImpl()
        {
            // create the location to copy to
            File targetDir = new File(targetLocation);
            final File targetDir = new File(targetLocation);
            if (targetDir.exists() && !targetDir.isDirectory())
            {
                throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
            }
            File targetParentDir = targetDir.getParentFile();
            final File targetParentDir = targetDir.getParentFile();
            if (targetParentDir == null)
            {
                throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
            }
            File tempDir = new File(targetParentDir, "indexbackup_temp");
            final File tempDir = new File(targetParentDir, "indexbackup_temp");
            
            // get all the available stores
            List<StoreRef> storeRefs = nodeService.getStores();
            
            // lock all the stores
            List<StoreRef> lockedStores = new ArrayList<StoreRef>(storeRefs.size());
            try
            factory.doWithAllWriteLocks(new WithAllWriteLocksWork<Object>()
            {
                for (StoreRef storeRef : storeRefs)
                {
                    factory.luceneIndexLock.getWriteLock(storeRef);
                    lockedStores.add(storeRef);
                }
                File indexRootDir = new File(factory.indexRootLocation);
                // perform the copy
                backupDirectory(indexRootDir, tempDir, targetDir);
            }
            catch (Throwable e)
            {
                throw new AlfrescoRuntimeException("Failed to copy Lucene index root: \n" +
                        "   Index root: " + factory.indexRootLocation + "\n" +
                        "   Target: " + targetDir,
                        e);
            }
            finally
            {
                for (StoreRef storeRef : lockedStores)
                public Object doWork()
                {
                    try
                    {
                        factory.luceneIndexLock.releaseWriteLock(storeRef);
                        File indexRootDir = new File(factory.getIndexRootLocation());
                        // perform the copy
                        backupDirectory(indexRootDir, tempDir, targetDir);
                        return null;
                    }
                    catch (Throwable e)
                    {
                        logger.error("Failed to release index lock for store " + storeRef, e);
                        throw new AlfrescoRuntimeException(
                                "Failed to copy Lucene index root: \n"
                                + "   Index root: " + factory.getIndexRootLocation() + "\n" + "   Target: "
                                + targetDir, e);
                    }
                }
            });
            
            if (logger.isDebugEnabled())
            {
                logger.debug("Backed up Lucene indexes: \n" +
                        "   Target directory: " + targetDir);
                logger.debug("Backed up Lucene indexes: \n" + "   Target directory: " + targetDir);
            }
        }
        
        /**
         * Makes a backup of the source directory via a temporary folder
         */
@ -1085,14 +1069,15 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
    {
        /** KEY_LUCENE_INDEX_BACKUP_COMPONENT = 'luceneIndexBackupComponent' */
        public static final String KEY_LUCENE_INDEX_BACKUP_COMPONENT = "luceneIndexBackupComponent";
        
        /**
         * Locks the Lucene indexes and copies them to a backup location
         */
        public void execute(JobExecutionContext context) throws JobExecutionException
        {
            JobDataMap jobData = context.getJobDetail().getJobDataMap();
            LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData.get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
            LuceneIndexBackupComponent backupComponent = (LuceneIndexBackupComponent) jobData
                    .get(KEY_LUCENE_INDEX_BACKUP_COMPONENT);
            if (backupComponent == null)
            {
                throw new JobExecutionException("Missing job data: " + KEY_LUCENE_INDEX_BACKUP_COMPONENT);
@ -1101,4 +1086,108 @@ public class LuceneIndexerAndSearcherFactory2 implements LuceneIndexerAndSearche
            backupComponent.backup();
        }
    }
    
public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork)
|
||||
{
|
||||
// get all the available stores
|
||||
List<StoreRef> storeRefs = nodeService.getStores();
|
||||
|
||||
IndexInfo.LockWork<R> currentLockWork = null;
|
||||
|
||||
for (int i = storeRefs.size() - 1; i >= 0; i--)
|
||||
{
|
||||
if (currentLockWork == null)
|
||||
{
|
||||
currentLockWork = new CoreLockWork<R>(getIndexer(storeRefs.get(i)), lockWork);
|
||||
}
|
||||
else
|
||||
{
|
||||
currentLockWork = new NestingLockWork<R>(getIndexer(storeRefs.get(i)), currentLockWork);
|
||||
}
|
||||
}
|
||||
|
||||
if (currentLockWork != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
return currentLockWork.doWork();
|
||||
}
|
||||
catch (Throwable exception)
|
||||
{
|
||||
|
||||
// Re-throw the exception
|
||||
if (exception instanceof RuntimeException)
|
||||
{
|
||||
throw (RuntimeException) exception;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new RuntimeException("Error during run with lock.", exception);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static class NestingLockWork<R> implements IndexInfo.LockWork<R>
|
||||
{
|
||||
IndexInfo.LockWork<R> lockWork;
|
||||
|
||||
LuceneIndexer2 indexer;
|
||||
|
||||
NestingLockWork(LuceneIndexer2 indexer, IndexInfo.LockWork<R> lockWork)
|
||||
{
|
||||
this.indexer = indexer;
|
||||
this.lockWork = lockWork;
|
||||
}
|
||||
|
||||
public R doWork() throws Exception
|
||||
{
|
||||
return indexer.doWithWriteLock(lockWork);
|
||||
}
|
||||
}
|
||||
|
||||
private static class CoreLockWork<R> implements IndexInfo.LockWork<R>
|
||||
{
|
||||
WithAllWriteLocksWork<R> lockWork;
|
||||
|
||||
LuceneIndexer2 indexer;
|
||||
|
||||
CoreLockWork(LuceneIndexer2 indexer, WithAllWriteLocksWork<R> lockWork)
|
||||
{
|
||||
this.indexer = indexer;
|
||||
this.lockWork = lockWork;
|
||||
}
|
||||
|
||||
public R doWork() throws Exception
|
||||
{
|
||||
return indexer.doWithWriteLock(new IndexInfo.LockWork<R>()
|
||||
{
|
||||
public R doWork()
|
||||
{
|
||||
try
|
||||
{
|
||||
return lockWork.doWork();
|
||||
}
|
||||
catch (Throwable exception)
|
||||
{
|
||||
|
||||
// Re-throw the exception
|
||||
if (exception instanceof RuntimeException)
|
||||
{
|
||||
throw (RuntimeException) exception;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new RuntimeException("Error during run with lock.", exception);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
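The loop above composes a chain of lock-work wrappers: each store's indexer write lock wraps the next, and the caller's work runs innermost, only once every store is locked. A minimal standalone sketch of the same composition pattern (class and method names are hypothetical, independent of the Alfresco classes):

    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.locks.ReentrantLock;

    // Standalone illustration of the chained-lock pattern used above.
    public class ChainedLockSketch
    {
        interface Work<R> { R doWork() throws Exception; }

        static <R> R withAllLocks(List<ReentrantLock> locks, Work<R> work) throws Exception
        {
            Work<R> chain = work;
            // Build from the last lock inwards, exactly like the store loop above.
            for (int i = locks.size() - 1; i >= 0; i--)
            {
                final ReentrantLock lock = locks.get(i);
                final Work<R> inner = chain;
                chain = new Work<R>()
                {
                    public R doWork() throws Exception
                    {
                        lock.lock();
                        try { return inner.doWork(); }
                        finally { lock.unlock(); }
                    }
                };
            }
            return chain.doWork();
        }

        public static void main(String[] args) throws Exception
        {
            List<ReentrantLock> locks = Arrays.asList(new ReentrantLock(), new ReentrantLock());
            String result = withAllLocks(locks, new Work<String>()
            {
                public String doWork() { return "ran with all locks held"; }
            });
            System.out.println(result);
        }
    }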
@ -43,6 +43,7 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@ -1970,5 +1971,9 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
    }
}



}

@ -236,7 +236,7 @@ public class LuceneSearcherImpl extends LuceneBase implements LuceneSearcher
        switch (sd.getSortType())
        {
        case FIELD:
            fields[index++] = new SortField(sd.getField(), !sd.isAscending());
            fields[index++] = new SortField(sd.getField(), SortField.STRING, !sd.isAscending());
            break;
        case DOCUMENT:
            fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());

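For context (hedged, Lucene 1.4-era behaviour): the two-argument SortField constructor leaves Lucene to auto-detect the field type from the first indexed term, which is fragile for fields holding order-preserving encoded values; the explicit type pins the comparison to be lexicographic. A one-line illustration (field name invented):

    // Explicitly typed sort -- avoids Lucene guessing int/float from term text.
    Sort sort = new Sort(new SortField("@{test}orderInt", SortField.STRING, false));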
@ -41,6 +41,7 @@ import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.node.BaseNodeServiceTest;
import org.alfresco.repo.search.QueryParameterDefImpl;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.impl.lucene.analysis.NumericEncoder;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.results.ChildAssocRefResultSet;
import org.alfresco.repo.search.results.DetachedResultSet;
@ -59,6 +60,7 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.Duration;
import org.alfresco.service.cmr.search.QueryParameter;
import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.ResultSet;
@ -93,6 +95,16 @@ public class LuceneTest extends TestCase

    private static Log logger = LogFactory.getLog(LuceneTest.class);

    QName createdDate = QName.createQName(TEST_NAMESPACE, "createdDate");

    QName orderDouble = QName.createQName(TEST_NAMESPACE, "orderDouble");

    QName orderFloat = QName.createQName(TEST_NAMESPACE, "orderFloat");

    QName orderLong = QName.createQName(TEST_NAMESPACE, "orderLong");

    QName orderInt = QName.createQName(TEST_NAMESPACE, "orderInt");

    TransactionService transactionService;

    NodeService nodeService;
@ -179,7 +191,6 @@ public class LuceneTest extends TestCase
        serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);

        this.authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponent");

        queryRegisterComponent.loadQueryCollection("testQueryRegister.xml");

@ -189,7 +200,7 @@ public class LuceneTest extends TestCase
        testTX = transactionService.getUserTransaction();
        testTX.begin();
        this.authenticationComponent.setSystemUserAsCurrentUser();

        // load in the test model
        ClassLoader cl = BaseNodeServiceTest.class.getClassLoader();
        InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/lucene/LuceneTest_model.xml");
@ -201,16 +212,16 @@ public class LuceneTest extends TestCase
        rootNodeRef = nodeService.getRootNode(storeRef);

        n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();
        nodeService.setProperty(n1, QName.createQName("{namespace}property-1"), "ValueOne");

        n2 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}two"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();
        nodeService.setProperty(n2, QName.createQName("{namespace}property-1"), "valueone");
        nodeService.setProperty(n2, QName.createQName("{namespace}property-2"), "valuetwo");

        n3 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}three"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();

        ObjectOutputStream oos;
        try
@ -270,24 +281,24 @@ public class LuceneTest extends TestCase
        nodeService.getProperties(n3);
        nodeService.getProperties(n4);

        n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType)
                .getChildRef();
        n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType)
                .getChildRef();
        n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType)
                .getChildRef();
        n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType)
                .getChildRef();
        n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType)
                .getChildRef();
        n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType)
                .getChildRef();
        n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType)
                .getChildRef();
        n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType)
                .getChildRef();
        n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType)
                .getChildRef();
        n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType,
                getOrderProperties()).getChildRef();
        n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType,
                getOrderProperties()).getChildRef();
        n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType,
                getOrderProperties()).getChildRef();
        n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType,
                getOrderProperties()).getChildRef();
        n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType,
                getOrderProperties()).getChildRef();
        n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType,
                getOrderProperties()).getChildRef();
        n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType,
                getOrderProperties()).getChildRef();
        n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType,
                getOrderProperties()).getChildRef();
        n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType,
                getOrderProperties()).getChildRef();

        Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
        properties.put(ContentModel.PROP_CONTENT, new ContentData(null, "text/plain", 0L, "UTF-16"));
@ -317,6 +328,32 @@ public class LuceneTest extends TestCase

    }

    private double orderDoubleCount = -0.11d;

    private Date orderDate = new Date();

    private float orderFloatCount = -3.5556f;

    private long orderLongCount = -1999999999999999l;

    private int orderIntCount = -45764576;

    public Map<QName, Serializable> getOrderProperties()
    {
        Map<QName, Serializable> testProperties = new HashMap<QName, Serializable>();
        testProperties.put(createdDate, orderDate);
        testProperties.put(orderDouble, orderDoubleCount);
        testProperties.put(orderFloat, orderFloatCount);
        testProperties.put(orderLong, orderLongCount);
        testProperties.put(orderInt, orderIntCount);
        orderDate = Duration.subtract(orderDate, new Duration("P1D"));
        orderDoubleCount += 0.1d;
        orderFloatCount += 0.82f;
        orderLongCount += 299999999999999l;
        orderIntCount += 8576457;
        return testProperties;
    }
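Each call hands back strictly monotone values -- the date steps one day back while the numeric counters step up -- so every node created with these properties gets a distinct, ordered key for the sort assertions later in the test. A hedged standalone sketch of the same generator idea (names hypothetical):

    import java.util.Date;

    // Minimal sketch of the monotone-sequence idea above: each call returns the
    // current values and advances the counters, so node N always sorts before
    // node N+1 on the numeric keys and after it on the date.
    class OrderSequenceSketch
    {
        private double d = -0.11d;
        private long time = System.currentTimeMillis();

        double nextDouble()
        {
            double current = d;
            d += 0.1d;
            return current;
        }

        Date nextDate()
        {
            Date current = new Date(time);
            time -= 24L * 60 * 60 * 1000; // step one day back, like Duration("P1D")
            return current;
        }
    }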

    @Override
    protected void tearDown() throws Exception
    {
@ -342,7 +379,7 @@ public class LuceneTest extends TestCase
        luceneFTS.resume();
    }

    public void testDeleteIssue() throws Exception
    public void xtestDeleteIssue() throws Exception
    {

        testTX.commit();
@ -391,7 +428,7 @@ public class LuceneTest extends TestCase
        tx3.commit();
    }

    public void testMTDeleteIssue() throws Exception
    public void xtestMTDeleteIssue() throws Exception
    {
        luceneFTS.pause();
        testTX.commit();
@ -794,6 +831,226 @@ public class LuceneTest extends TestCase
        }
        results.close();

        // sort by created date

        SearchParameters sp7 = new SearchParameters();
        sp7.addStore(rootNodeRef.getStoreRef());
        sp7.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp7.setQuery("PATH:\"//.\"");
        sp7.addSort("@" + createdDate, true);
        results = searcher.query(sp7);

        Date date = null;
        for (ResultSetRow row : results)
        {
            Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
                    .getNodeRef(), createdDate));
            //System.out.println(currentBun);
            if (date != null)
            {
                assertTrue(date.compareTo(currentBun) <= 0);
            }
            date = currentBun;
        }
        results.close();

        SearchParameters sp8 = new SearchParameters();
        sp8.addStore(rootNodeRef.getStoreRef());
        sp8.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp8.setQuery("PATH:\"//.\"");
        sp8.addSort("@" + createdDate, false);
        results = searcher.query(sp8);

        date = null;
        for (ResultSetRow row : results)
        {
            Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
                    .getNodeRef(), createdDate));
            //System.out.println(currentBun);
            if ((date != null) && (currentBun != null))
            {
                assertTrue(date.compareTo(currentBun) >= 0);
            }
            date = currentBun;
        }
        results.close();

        // sort by double

        SearchParameters sp9 = new SearchParameters();
        sp9.addStore(rootNodeRef.getStoreRef());
        sp9.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp9.setQuery("PATH:\"//.\"");
        sp9.addSort("@" + orderDouble, true);
        results = searcher.query(sp9);

        Double d = null;
        for (ResultSetRow row : results)
        {
            Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
                    .getNodeRef(), orderDouble));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (d != null)
            {
                assertTrue(d.compareTo(currentBun) <= 0);
            }
            d = currentBun;
        }
        results.close();

        SearchParameters sp10 = new SearchParameters();
        sp10.addStore(rootNodeRef.getStoreRef());
        sp10.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp10.setQuery("PATH:\"//.\"");
        sp10.addSort("@" + orderDouble, false);
        results = searcher.query(sp10);

        d = null;
        for (ResultSetRow row : results)
        {
            Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
                    .getNodeRef(), orderDouble));
            //System.out.println(currentBun);
            if ((d != null) && (currentBun != null))
            {
                assertTrue(d.compareTo(currentBun) >= 0);
            }
            d = currentBun;
        }
        results.close();

        // sort by float

        SearchParameters sp11 = new SearchParameters();
        sp11.addStore(rootNodeRef.getStoreRef());
        sp11.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp11.setQuery("PATH:\"//.\"");
        sp11.addSort("@" + orderFloat, true);
        results = searcher.query(sp11);

        Float f = null;
        for (ResultSetRow row : results)
        {
            Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
                    .getNodeRef(), orderFloat));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (f != null)
            {
                assertTrue(f.compareTo(currentBun) <= 0);
            }
            f = currentBun;
        }
        results.close();

        SearchParameters sp12 = new SearchParameters();
        sp12.addStore(rootNodeRef.getStoreRef());
        sp12.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp12.setQuery("PATH:\"//.\"");
        sp12.addSort("@" + orderFloat, false);
        results = searcher.query(sp12);

        f = null;
        for (ResultSetRow row : results)
        {
            Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
                    .getNodeRef(), orderFloat));
            //System.out.println(currentBun);
            if ((f != null) && (currentBun != null))
            {
                assertTrue(f.compareTo(currentBun) >= 0);
            }
            f = currentBun;
        }
        results.close();

        // sort by long

        SearchParameters sp13 = new SearchParameters();
        sp13.addStore(rootNodeRef.getStoreRef());
        sp13.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp13.setQuery("PATH:\"//.\"");
        sp13.addSort("@" + orderLong, true);
        results = searcher.query(sp13);

        Long l = null;
        for (ResultSetRow row : results)
        {
            Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
                    .getNodeRef(), orderLong));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (l != null)
            {
                assertTrue(l.compareTo(currentBun) <= 0);
            }
            l = currentBun;
        }
        results.close();

        SearchParameters sp14 = new SearchParameters();
        sp14.addStore(rootNodeRef.getStoreRef());
        sp14.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp14.setQuery("PATH:\"//.\"");
        sp14.addSort("@" + orderLong, false);
        results = searcher.query(sp14);

        l = null;
        for (ResultSetRow row : results)
        {
            Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
                    .getNodeRef(), orderLong));
            //System.out.println(currentBun);
            if ((l != null) && (currentBun != null))
            {
                assertTrue(l.compareTo(currentBun) >= 0);
            }
            l = currentBun;
        }
        results.close();

        // sort by int

        SearchParameters sp15 = new SearchParameters();
        sp15.addStore(rootNodeRef.getStoreRef());
        sp15.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp15.setQuery("PATH:\"//.\"");
        sp15.addSort("@" + orderInt, true);
        results = searcher.query(sp15);

        Integer i = null;
        for (ResultSetRow row : results)
        {
            Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
                    .getNodeRef(), orderInt));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (i != null)
            {
                assertTrue(i.compareTo(currentBun) <= 0);
            }
            i = currentBun;
        }
        results.close();

        SearchParameters sp16 = new SearchParameters();
        sp16.addStore(rootNodeRef.getStoreRef());
        sp16.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp16.setQuery("PATH:\"//.\"");
        sp16.addSort("@" + orderInt, false);
        results = searcher.query(sp16);

        i = null;
        for (ResultSetRow row : results)
        {
            Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
                    .getNodeRef(), orderInt));
            //System.out.println(currentBun);
            if ((i != null) && (currentBun != null))
            {
                assertTrue(i.compareTo(currentBun) >= 0);
            }
            i = currentBun;
        }
        results.close();

        luceneFTS.resume();
    }

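Each of the ten sort checks above repeats the same walk-and-compare loop over the result set. A compact, hypothetical helper (not present in the original tests) expressing the same invariant:

    // Hypothetical helper, shown only to make the repeated pattern explicit.
    static <T extends Comparable<T>> void assertOrdered(java.util.List<T> values, boolean ascending)
    {
        for (int i = 1; i < values.size(); i++)
        {
            T prev = values.get(i - 1);
            T curr = values.get(i);
            if (prev == null || curr == null)
            {
                continue; // the tests skip comparisons involving nulls
            }
            int cmp = prev.compareTo(curr);
            assertTrue(ascending ? cmp <= 0 : cmp >= 0);
        }
    }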
@ -2078,7 +2335,7 @@ public class LuceneTest extends TestCase
        assertEquals(1, results.length());
        results.close();
    }


    public void testNumericInPath() throws Exception
    {
        String COMPLEX_LOCAL_NAME = "Woof12";
@ -2860,7 +3117,7 @@ public class LuceneTest extends TestCase
        luceneFTS.resume();
    }

    public void testIssueAR47() throws Exception
    public void xtestIssueAR47() throws Exception
    {
        // This bug arose from repeated deletes and adds creating empty index
        // segments.

@ -59,6 +59,7 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.repository.datatype.Duration;
import org.alfresco.service.cmr.search.QueryParameter;
import org.alfresco.service.cmr.search.QueryParameterDefinition;
import org.alfresco.service.cmr.search.ResultSet;
@ -93,6 +94,16 @@ public class LuceneTest2 extends TestCase

    private static Log logger = LogFactory.getLog(LuceneTest2.class);

    QName createdDate = QName.createQName(TEST_NAMESPACE, "createdDate");

    QName orderDouble = QName.createQName(TEST_NAMESPACE, "orderDouble");

    QName orderFloat = QName.createQName(TEST_NAMESPACE, "orderFloat");

    QName orderLong = QName.createQName(TEST_NAMESPACE, "orderLong");

    QName orderInt = QName.createQName(TEST_NAMESPACE, "orderInt");

    TransactionService transactionService;

    NodeService nodeService;
@ -201,16 +212,16 @@ public class LuceneTest2 extends TestCase
        rootNodeRef = nodeService.getRootNode(storeRef);

        n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}one"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();
        nodeService.setProperty(n1, QName.createQName("{namespace}property-1"), "ValueOne");

        n2 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}two"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();
        nodeService.setProperty(n2, QName.createQName("{namespace}property-1"), "valueone");
        nodeService.setProperty(n2, QName.createQName("{namespace}property-2"), "valuetwo");

        n3 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}three"),
                testSuperType).getChildRef();
                testSuperType, getOrderProperties()).getChildRef();

        ObjectOutputStream oos;
        try
@ -270,24 +281,24 @@ public class LuceneTest2 extends TestCase
        nodeService.getProperties(n3);
        nodeService.getProperties(n4);

        n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType)
                .getChildRef();
        n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType)
                .getChildRef();
        n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType)
                .getChildRef();
        n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType)
                .getChildRef();
        n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType)
                .getChildRef();
        n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType)
                .getChildRef();
        n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType)
                .getChildRef();
        n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType)
                .getChildRef();
        n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType)
                .getChildRef();
        n5 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}five"), testSuperType,
                getOrderProperties()).getChildRef();
        n6 = nodeService.createNode(n1, ASSOC_TYPE_QNAME, QName.createQName("{namespace}six"), testSuperType,
                getOrderProperties()).getChildRef();
        n7 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}seven"), testSuperType,
                getOrderProperties()).getChildRef();
        n8 = nodeService.createNode(n2, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eight-2"), testSuperType,
                getOrderProperties()).getChildRef();
        n9 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}nine"), testSuperType,
                getOrderProperties()).getChildRef();
        n10 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}ten"), testSuperType,
                getOrderProperties()).getChildRef();
        n11 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}eleven"), testSuperType,
                getOrderProperties()).getChildRef();
        n12 = nodeService.createNode(n5, ASSOC_TYPE_QNAME, QName.createQName("{namespace}twelve"), testSuperType,
                getOrderProperties()).getChildRef();
        n13 = nodeService.createNode(n12, ASSOC_TYPE_QNAME, QName.createQName("{namespace}thirteen"), testSuperType,
                getOrderProperties()).getChildRef();

        Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
        properties.put(ContentModel.PROP_CONTENT, new ContentData(null, "text/plain", 0L, "UTF-16"));
@ -317,6 +328,32 @@ public class LuceneTest2 extends TestCase

    }

    private double orderDoubleCount = -0.11d;

    private Date orderDate = new Date();

    private float orderFloatCount = -3.5556f;

    private long orderLongCount = -1999999999999999l;

    private int orderIntCount = -45764576;

    public Map<QName, Serializable> getOrderProperties()
    {
        Map<QName, Serializable> testProperties = new HashMap<QName, Serializable>();
        testProperties.put(createdDate, orderDate);
        testProperties.put(orderDouble, orderDoubleCount);
        testProperties.put(orderFloat, orderFloatCount);
        testProperties.put(orderLong, orderLongCount);
        testProperties.put(orderInt, orderIntCount);
        orderDate = Duration.subtract(orderDate, new Duration("P1D"));
        orderDoubleCount += 0.1d;
        orderFloatCount += 0.82f;
        orderLongCount += 299999999999999l;
        orderIntCount += 8576457;
        return testProperties;
    }

    @Override
    protected void tearDown() throws Exception
    {
@ -794,6 +831,226 @@ public class LuceneTest2 extends TestCase
        }
        results.close();

        // sort by created date

        SearchParameters sp7 = new SearchParameters();
        sp7.addStore(rootNodeRef.getStoreRef());
        sp7.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp7.setQuery("PATH:\"//.\"");
        sp7.addSort("@" + createdDate, true);
        results = searcher.query(sp7);

        Date date = null;
        for (ResultSetRow row : results)
        {
            Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
                    .getNodeRef(), createdDate));
            //System.out.println(currentBun);
            if (date != null)
            {
                assertTrue(date.compareTo(currentBun) <= 0);
            }
            date = currentBun;
        }
        results.close();

        SearchParameters sp8 = new SearchParameters();
        sp8.addStore(rootNodeRef.getStoreRef());
        sp8.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp8.setQuery("PATH:\"//.\"");
        sp8.addSort("@" + createdDate, false);
        results = searcher.query(sp8);

        date = null;
        for (ResultSetRow row : results)
        {
            Date currentBun = DefaultTypeConverter.INSTANCE.convert(Date.class, nodeService.getProperty(row
                    .getNodeRef(), createdDate));
            //System.out.println(currentBun);
            if ((date != null) && (currentBun != null))
            {
                assertTrue(date.compareTo(currentBun) >= 0);
            }
            date = currentBun;
        }
        results.close();

        // sort by double

        SearchParameters sp9 = new SearchParameters();
        sp9.addStore(rootNodeRef.getStoreRef());
        sp9.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp9.setQuery("PATH:\"//.\"");
        sp9.addSort("@" + orderDouble, true);
        results = searcher.query(sp9);

        Double d = null;
        for (ResultSetRow row : results)
        {
            Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
                    .getNodeRef(), orderDouble));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (d != null)
            {
                assertTrue(d.compareTo(currentBun) <= 0);
            }
            d = currentBun;
        }
        results.close();

        SearchParameters sp10 = new SearchParameters();
        sp10.addStore(rootNodeRef.getStoreRef());
        sp10.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp10.setQuery("PATH:\"//.\"");
        sp10.addSort("@" + orderDouble, false);
        results = searcher.query(sp10);

        d = null;
        for (ResultSetRow row : results)
        {
            Double currentBun = DefaultTypeConverter.INSTANCE.convert(Double.class, nodeService.getProperty(row
                    .getNodeRef(), orderDouble));
            //System.out.println(currentBun);
            if ((d != null) && (currentBun != null))
            {
                assertTrue(d.compareTo(currentBun) >= 0);
            }
            d = currentBun;
        }
        results.close();

        // sort by float

        SearchParameters sp11 = new SearchParameters();
        sp11.addStore(rootNodeRef.getStoreRef());
        sp11.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp11.setQuery("PATH:\"//.\"");
        sp11.addSort("@" + orderFloat, true);
        results = searcher.query(sp11);

        Float f = null;
        for (ResultSetRow row : results)
        {
            Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
                    .getNodeRef(), orderFloat));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (f != null)
            {
                assertTrue(f.compareTo(currentBun) <= 0);
            }
            f = currentBun;
        }
        results.close();

        SearchParameters sp12 = new SearchParameters();
        sp12.addStore(rootNodeRef.getStoreRef());
        sp12.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp12.setQuery("PATH:\"//.\"");
        sp12.addSort("@" + orderFloat, false);
        results = searcher.query(sp12);

        f = null;
        for (ResultSetRow row : results)
        {
            Float currentBun = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeService.getProperty(row
                    .getNodeRef(), orderFloat));
            //System.out.println(currentBun);
            if ((f != null) && (currentBun != null))
            {
                assertTrue(f.compareTo(currentBun) >= 0);
            }
            f = currentBun;
        }
        results.close();

        // sort by long

        SearchParameters sp13 = new SearchParameters();
        sp13.addStore(rootNodeRef.getStoreRef());
        sp13.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp13.setQuery("PATH:\"//.\"");
        sp13.addSort("@" + orderLong, true);
        results = searcher.query(sp13);

        Long l = null;
        for (ResultSetRow row : results)
        {
            Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
                    .getNodeRef(), orderLong));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (l != null)
            {
                assertTrue(l.compareTo(currentBun) <= 0);
            }
            l = currentBun;
        }
        results.close();

        SearchParameters sp14 = new SearchParameters();
        sp14.addStore(rootNodeRef.getStoreRef());
        sp14.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp14.setQuery("PATH:\"//.\"");
        sp14.addSort("@" + orderLong, false);
        results = searcher.query(sp14);

        l = null;
        for (ResultSetRow row : results)
        {
            Long currentBun = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(row
                    .getNodeRef(), orderLong));
            //System.out.println(currentBun);
            if ((l != null) && (currentBun != null))
            {
                assertTrue(l.compareTo(currentBun) >= 0);
            }
            l = currentBun;
        }
        results.close();

        // sort by int

        SearchParameters sp15 = new SearchParameters();
        sp15.addStore(rootNodeRef.getStoreRef());
        sp15.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp15.setQuery("PATH:\"//.\"");
        sp15.addSort("@" + orderInt, true);
        results = searcher.query(sp15);

        Integer i = null;
        for (ResultSetRow row : results)
        {
            Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
                    .getNodeRef(), orderInt));
            //System.out.println( (currentBun == null ? "null" : NumericEncoder.encode(currentBun))+ " "+currentBun);
            if (i != null)
            {
                assertTrue(i.compareTo(currentBun) <= 0);
            }
            i = currentBun;
        }
        results.close();

        SearchParameters sp16 = new SearchParameters();
        sp16.addStore(rootNodeRef.getStoreRef());
        sp16.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp16.setQuery("PATH:\"//.\"");
        sp16.addSort("@" + orderInt, false);
        results = searcher.query(sp16);

        i = null;
        for (ResultSetRow row : results)
        {
            Integer currentBun = DefaultTypeConverter.INSTANCE.convert(Integer.class, nodeService.getProperty(row
                    .getNodeRef(), orderInt));
            //System.out.println(currentBun);
            if ((i != null) && (currentBun != null))
            {
                assertTrue(i.compareTo(currentBun) >= 0);
            }
            i = currentBun;
        }
        results.close();

        luceneFTS.resume();
    }

@ -14,165 +14,217 @@
   <namespace uri="http://www.alfresco.org/test/lucenetest" prefix="test"/>
   </namespaces>

   <types>
      <type name="test:testSuperType">
         <title>Test Super Type</title>
         <parent>sys:container</parent>
         <associations>
            <child-association name="test:assoc">
               <source>
                  <mandatory>false</mandatory>
                  <many>true</many>
               </source>
               <target>
                  <class>sys:base</class>
                  <mandatory>false</mandatory>
                  <many>true</many>
               </target>
            </child-association>
         </associations>
      </type>

      <type name="test:testType">
         <title>Test Type</title>
         <parent>test:testSuperType</parent>
         <properties>
            <property name="test:text-indexed-stored-tokenised-atomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:text-indexed-unstored-tokenised-atomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>false</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:text-indexed-stored-tokenised-nonatomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>false</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:int-ista">
               <type>d:int</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:long-ista">
               <type>d:long</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:float-ista">
               <type>d:float</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:double-ista">
               <type>d:double</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:date-ista">
               <type>d:date</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:datetime-ista">
               <type>d:datetime</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:boolean-ista">
               <type>d:boolean</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:qname-ista">
               <type>d:qname</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:category-ista">
               <type>d:category</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:noderef-ista">
               <type>d:noderef</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
         </properties>
         <mandatory-aspects>
            <aspect>test:testAspect</aspect>
         </mandatory-aspects>
      </type>
   </types>
   <types>
      <type name="test:testSuperType">
         <title>Test Super Type</title>
         <parent>sys:container</parent>
         <properties>
            <property name="test:createdDate">
               <type>d:date</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:orderDouble">
               <type>d:double</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:orderFloat">
               <type>d:float</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:orderLong">
               <type>d:long</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:orderInt">
               <type>d:int</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
         </properties>
         <associations>
            <child-association name="test:assoc">
               <source>
                  <mandatory>false</mandatory>
                  <many>true</many>
               </source>
               <target>
                  <class>sys:base</class>
                  <mandatory>false</mandatory>
                  <many>true</many>
               </target>
            </child-association>
         </associations>
      </type>

      <type name="test:testType">
         <title>Test Type</title>
         <parent>test:testSuperType</parent>
         <properties>
            <property name="test:text-indexed-stored-tokenised-atomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:text-indexed-unstored-tokenised-atomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>false</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:text-indexed-stored-tokenised-nonatomic">
               <type>d:text</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>false</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:int-ista">
               <type>d:int</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:long-ista">
               <type>d:long</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:float-ista">
               <type>d:float</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:double-ista">
               <type>d:double</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:date-ista">
               <type>d:date</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:datetime-ista">
               <type>d:datetime</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:boolean-ista">
               <type>d:boolean</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:qname-ista">
               <type>d:qname</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:category-ista">
               <type>d:category</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
            <property name="test:noderef-ista">
               <type>d:noderef</type>
               <mandatory>true</mandatory>
               <multiple>false</multiple>
               <index enabled="true">
                  <atomic>true</atomic>
                  <stored>true</stored>
                  <tokenised>true</tokenised>
               </index>
            </property>
         </properties>
         <mandatory-aspects>
            <aspect>test:testAspect</aspect>
         </mandatory-aspects>
      </type>
   </types>

   <aspects>
      <aspect name="test:testSuperAspect">
@ -21,7 +21,7 @@ import java.io.Reader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;

/**
 * Simple tokeniser for doubles.
@ -35,7 +35,7 @@ public class DoubleTokenFilter extends Tokenizer
    public DoubleTokenFilter(Reader in)
    {
        super(in);
        baseTokeniser = new StandardTokenizer(in);
        baseTokeniser = new WhitespaceTokenizer(in);
    }

    /*

@ -21,6 +21,7 @@ import java.io.Reader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;

/**
@ -35,7 +36,7 @@ public class FloatTokenFilter extends Tokenizer
    public FloatTokenFilter(Reader in)
    {
        super(in);
        baseTokeniser = new StandardTokenizer(in);
        baseTokeniser = new WhitespaceTokenizer(in);
    }

    /*

@ -21,6 +21,7 @@ import java.io.Reader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;

/**
@ -35,7 +36,7 @@ public class IntegerTokenFilter extends Tokenizer
    public IntegerTokenFilter(Reader in)
    {
        super(in);
        baseTokeniser = new StandardTokenizer(in);
        baseTokeniser = new WhitespaceTokenizer(in);
    }

    /*

@ -22,6 +22,7 @@ import java.io.Reader;
import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;

/**
@ -36,7 +37,7 @@ public class LongTokenFilter extends Tokenizer
    public LongTokenFilter(Reader in)
    {
        super(in);
        baseTokeniser = new StandardTokenizer(in);
        baseTokeniser = new WhitespaceTokenizer(in);
    }

    /*
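All four numeric token filters now delegate to WhitespaceTokenizer rather than StandardTokenizer, presumably so that the order-preserving encoded values reach each filter as single, untouched tokens. A hedged sketch of the behaviour being relied on (Lucene 1.x/2.x-era API; the input strings are invented examples):

    // WhitespaceTokenizer splits only on whitespace, so an encoded value such
    // as "ffefffffffffffff" survives as one token for the filter to decode.
    Tokenizer t = new WhitespaceTokenizer(new StringReader("ffefffffffffffff 3fee147ae147ae14"));
    for (Token tok = t.next(); tok != null; tok = t.next())
    {
        System.out.println(tok.termText()); // prints the two tokens unchanged
    }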
@ -181,6 +181,12 @@ public class NumericEncodingTest extends TestCase
        assertEquals("ffefffffffffffff", NumericEncoder.encode(Double.MAX_VALUE));
        assertEquals("fff0000000000000", NumericEncoder.encode(Double.POSITIVE_INFINITY));
        assertEquals("fff8000000000000", NumericEncoder.encode(Double.NaN));

        assertTrue( NumericEncoder.encode(-0.9).compareTo(NumericEncoder.encode(0.88)) < 0);
        assertTrue( NumericEncoder.encode(-0.9).compareTo(NumericEncoder.encode(0.91)) < 0);
        assertTrue( NumericEncoder.encode(0.88).compareTo(NumericEncoder.encode(0.91)) < 0);

    }
}
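The new assertions pin down the key property of the encoding: string comparison of encoded values must agree with numeric comparison of the originals. One standard construction consistent with the expected values above (a sketch, not Alfresco's actual NumericEncoder) flips the sign bit of non-negative doubles and all bits of negative ones before hex-encoding, so lexicographic order matches numeric order:

    // Sketch only -- consistent with the expected values in the test above.
    public class DoubleEncodingSketch
    {
        static String encode(double d)
        {
            long bits = Double.doubleToRawLongBits(d);
            // negatives: flip all bits; non-negatives: flip just the sign bit
            bits = (bits < 0) ? ~bits : bits ^ 0x8000000000000000L;
            return String.format("%016x", bits);
        }

        public static void main(String[] args)
        {
            System.out.println(encode(Double.MAX_VALUE));         // ffefffffffffffff
            System.out.println(encode(Double.POSITIVE_INFINITY)); // fff0000000000000
            System.out.println(encode(-0.9).compareTo(encode(0.88)) < 0); // true
        }
    }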
@ -396,12 +396,12 @@ public class IndexInfo
        cleanerThread = new Thread(cleaner);
        cleanerThread.setDaemon(true);
        cleanerThread.setName("Index cleaner thread");
        cleanerThread.start();
        //cleanerThread.start();

        mergerThread = new Thread(merger);
        mergerThread.setDaemon(true);
        mergerThread.setName("Index merger thread");
        mergerThread.start();
        //mergerThread.start();

        IndexWriter writer;
        try
@ -1633,11 +1633,24 @@ public class IndexInfo
        return size;
    }

    private interface LockWork<Result>
    public interface LockWork<Result>
    {
        public Result doWork() throws Exception;
    }

    public <R> R doWithWriteLock(LockWork<R> lockWork)
    {
        getWriteLock();
        try
        {
            return doWithFileLock(lockWork);
        }
        finally
        {
            releaseWriteLock();
        }
    }

    private <R> R doWithFileLock(LockWork<R> lockWork)
    {
        FileLock fileLock = null;
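With LockWork now public and doWithWriteLock exposed, callers outside IndexInfo can run arbitrary work while both the in-JVM write lock and the on-disk file lock are held. A hedged usage sketch (the caller code is invented for illustration, not actual Alfresco code):

    // Hypothetical caller -- illustrates the contract only.
    Long result = indexInfo.doWithWriteLock(new IndexInfo.LockWork<Long>()
    {
        public Long doWork() throws Exception
        {
            // Runs with getWriteLock() held; doWithFileLock adds the file lock.
            return 42L;
        }
    });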
@ -1683,66 +1696,92 @@ public class IndexInfo
        }
    }

    public static void main(String[] args) throws IOException
    public static void main(String[] args)
    {
        String indexLocation = args[0];
        IndexInfo ii = new IndexInfo(new File(indexLocation));
        while (true)
        {
            ii.readWriteLock.writeLock().lock();
            try
            {
                System.out.println("Entry List for " + indexLocation);
                System.out.println("   Size = " + ii.indexEntries.size());
                int i = 0;
                for (IndexEntry entry : ii.indexEntries.values())
                {
                    System.out.println("\t" + (i++) + "\t" + entry.toString());
                }
            }
            finally
            {
                ii.releaseWriteLock();
            }
        }
    }

    // public static void main(String[] args) throws IOException
    // {
    //     System.setProperty("disableLuceneLocks", "true");
    //
    //     HashSet<NodeRef> deletions = new HashSet<NodeRef>();
    //     for (int i = 0; i < 0; i++)
    //     {
    //         deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()));
    //     }
    //
    //     int repeat = 100;
    //     int docs = 1;
    //     final IndexInfo ii = new IndexInfo(new File("c:\\indexTest"));
    //
    //     long totalTimeA = 0;
    //     long countA = 0;
    //
    //     while (true)
    //     {
    //         long start = System.nanoTime();
    //         for (int i = 0; i < repeat; i++)
    //         {
    //             String guid = GUID.generate();
    //             ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
    //             IndexWriter writer = ii.getDeltaIndexWriter(guid, new StandardAnalyzer());
    //
    //             for (int j = 0; j < docs; j++)
    //             {
    //                 Document doc = new Document();
    //                 for (int k = 0; k < 15; k++)
    //                 {
    //                     doc.add(new Field("ID" + k, guid + " " + j + " " + k, false, true, false));
    //                 }
    //                 writer.addDocument(doc);
    //             }
    //
    //             ii.closeDeltaIndexWriter(guid);
    //             ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
    //             ii.setPreparedState(guid, deletions, docs, false);
    //             ii.getDeletions(guid);
    //             ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
    //             ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
    //             ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
    //             for (int j = 0; j < 0; j++)
    //             {
    //                 ii.getMainIndexReferenceCountingReadOnlyIndexReader();
    //             }
    //         }
    //
    //         long end = System.nanoTime();
    //
    //         totalTimeA += (end - start);
    //         countA += repeat;
    //         float average = countA * 1000000000f / totalTimeA;
    //
    //         System.out.println("Repeated "
    //                 + repeat + " in " + ((end - start) / 1000000000.0) + " average = " + average);
    //     }
    // }

    /**
     * Clean up support.
     *
@ -1862,120 +1901,132 @@ public class IndexInfo

            while (running)
            {
                // Get the read local to decide what to do
                // Single JVM to start with
                MergeAction action = MergeAction.NONE;

                getReadLock();
                try
                {
                    if (indexIsShared && !checkVersion())
                    {
                        releaseReadLock();
                        getWriteLock();
                        try
                        {
                            // Sync with disk image if required
                            doWithFileLock(new LockWork<Object>()
                            {
                                public Object doWork() throws Exception
                                {
                                    return null;
                                }
                            });
                        }
                        finally
                        {
                            getReadLock();
                            releaseWriteLock();
                        }
                    }

                    int indexes = 0;
                    boolean mergingIndexes = false;
                    int deltas = 0;
                    boolean applyingDeletions = false;

                    for (IndexEntry entry : indexEntries.values())
                    {
                        if (entry.getType() == IndexType.INDEX)
                        {
                            indexes++;
                            if (entry.getStatus() == TransactionStatus.MERGE)
                            {
                                mergingIndexes = true;
                            }
                        }
                        else if (entry.getType() == IndexType.DELTA)
                        {
                            if (entry.getStatus() == TransactionStatus.COMMITTED)
                            {
                                deltas++;
                            }
                            if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)
                            {
                                applyingDeletions = true;
                            }
                        }
                    }

                    if (s_logger.isDebugEnabled())
                    {
                        s_logger.debug("Indexes = " + indexes);
                        s_logger.debug("Merging = " + mergingIndexes);
                        s_logger.debug("Deltas = " + deltas);
                        s_logger.debug("Deleting = " + applyingDeletions);
                    }

                    if (!mergingIndexes && !applyingDeletions)
                    {
                        if ((indexes > 5) || (deltas > 5))
                        {
                            if (indexes > deltas)
                            {
                                // Try merge
                                action = MergeAction.MERGE_INDEX;
                            }
                            else
                            {
                                // Try delete
                                action = MergeAction.APPLY_DELTA_DELETION;
                            }
                        }
                    }
                }
                catch (IOException e)
                {
                    e.printStackTrace();
                    // Ignore IO error and retry
                }
                finally
                {
                    releaseReadLock();
                }

                if (action == MergeAction.APPLY_DELTA_DELETION)
                {
                    mergeDeletions();
                }
                else if (action == MergeAction.MERGE_INDEX)
                {
                    mergeIndexes();
                }

                synchronized (this)
                {
                    try
                    {
                        this.wait();
                    }
                    catch (InterruptedException e)
                    {
                        running = false;
                    }
                }
            }

            while (running)
            {
                try
                {
                    // Get the read local to decide what to do
                    // Single JVM to start with
                    MergeAction action = MergeAction.NONE;

                    getReadLock();
                    try
                    {
                        if (indexIsShared && !checkVersion())
                        {
                            releaseReadLock();
                            getWriteLock();
                            try
                            {
                                // Sync with disk image if required
                                doWithFileLock(new LockWork<Object>()
                                {
                                    public Object doWork() throws Exception
                                    {
                                        return null;
                                    }
                                });
                            }
                            finally
                            {
                                try
                                {
                                    getReadLock();
                                }
                                finally
                                {
                                    releaseWriteLock();
                                }
                            }
                        }

                        int indexes = 0;
                        boolean mergingIndexes = false;
                        int deltas = 0;
                        boolean applyingDeletions = false;

                        for (IndexEntry entry : indexEntries.values())
                        {
                            if (entry.getType() == IndexType.INDEX)
                            {
                                indexes++;
                                if (entry.getStatus() == TransactionStatus.MERGE)
                                {
                                    mergingIndexes = true;
                                }
                            }
                            else if (entry.getType() == IndexType.DELTA)
                            {
                                if (entry.getStatus() == TransactionStatus.COMMITTED)
                                {
                                    deltas++;
                                }
                                if (entry.getStatus() == TransactionStatus.COMMITTED_DELETING)
                                {
                                    applyingDeletions = true;
                                }
                            }
                        }

                        if (s_logger.isDebugEnabled())
                        {
                            s_logger.debug("Indexes = " + indexes);
                            s_logger.debug("Merging = " + mergingIndexes);
                            s_logger.debug("Deltas = " + deltas);
                            s_logger.debug("Deleting = " + applyingDeletions);
                        }

                        if (!mergingIndexes && !applyingDeletions)
                        {
                            if ((indexes > 5) || (deltas > 5))
                            {
                                if (indexes > deltas)
                                {
                                    // Try merge
                                    action = MergeAction.MERGE_INDEX;
                                }
                                else
                                {
                                    // Try delete
                                    action = MergeAction.APPLY_DELTA_DELETION;
                                }
                            }
                        }
                    }
                    catch (IOException e)
                    {
                        s_logger.error(e);
                    }
                    finally
                    {
                        releaseReadLock();
                    }

                    if (action == MergeAction.APPLY_DELTA_DELETION)
                    {
                        mergeDeletions();
                    }
                    else if (action == MergeAction.MERGE_INDEX)
                    {
                        mergeIndexes();
                    }

                    synchronized (this)
                    {
                        try
                        {
                            this.wait();
                        }
                        catch (InterruptedException e)
                        {
                            // No action - could signal thread termination
                        }
                    }
                }
                catch (Throwable t)
                {
                    s_logger.error(t);
                }
            }

@ -2151,7 +2202,7 @@ public class IndexInfo
|
||||
}
|
||||
catch (IOException e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
s_logger.error(e);
|
||||
fail = true;
|
||||
}
|
||||
|
||||
@ -2411,9 +2462,9 @@ public class IndexInfo
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (IOException e)
|
||||
catch (Throwable e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
s_logger.error(e);
|
||||
fail = true;
|
||||
}
|
||||
|
||||
@ -2528,10 +2579,10 @@ public class IndexInfo
|
||||
|
||||
private void dumpInfo()
|
||||
{
|
||||
readWriteLock.writeLock().lock();
|
||||
try
|
||||
if (s_logger.isDebugEnabled())
|
||||
{
|
||||
if (s_logger.isDebugEnabled())
|
||||
readWriteLock.writeLock().lock();
|
||||
try
|
||||
{
|
||||
s_logger.debug("");
|
||||
s_logger.debug("Entry List");
|
||||
@ -2540,10 +2591,10 @@ public class IndexInfo
|
||||
s_logger.debug(" " + entry.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
readWriteLock.writeLock().unlock();
|
||||
finally
|
||||
{
|
||||
readWriteLock.writeLock().unlock();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -2584,4 +2635,8 @@ public class IndexInfo
|
||||
readWriteLock.readLock().unlock();
|
||||
}
|
||||
|
||||
public String toString()
|
||||
{
|
||||
return indexDirectory.toString();
|
||||
}
|
||||
}
|
||||
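
A minimal sketch of the merge-scheduling heuristic the merger thread above applies under the read lock. The types (IndexEntry, IndexType, TransactionStatus, MergeAction) and the threshold of 5 come from the diff itself; the helper method and its name are illustrative only, not part of IndexInfo:

// Hypothetical helper, not part of IndexInfo: summarises the decision logic above.
private MergeAction decideMergeAction(Collection<IndexEntry> entries)
{
    int indexes = 0;
    int deltas = 0;
    boolean mergingIndexes = false;
    boolean applyingDeletions = false;
    for (IndexEntry entry : entries)
    {
        if (entry.getType() == IndexType.INDEX)
        {
            indexes++;
            mergingIndexes |= (entry.getStatus() == TransactionStatus.MERGE);
        }
        else if (entry.getType() == IndexType.DELTA)
        {
            if (entry.getStatus() == TransactionStatus.COMMITTED)
            {
                deltas++;
            }
            applyingDeletions |= (entry.getStatus() == TransactionStatus.COMMITTED_DELETING);
        }
    }
    // Never start new work while a merge or a deletion pass is already in flight
    if (mergingIndexes || applyingDeletions)
    {
        return MergeAction.NONE;
    }
    // Only act on a backlog; prefer whichever kind of work is more plentiful
    if ((indexes > 5) || (deltas > 5))
    {
        return (indexes > deltas) ? MergeAction.MERGE_INDEX : MergeAction.APPLY_DELTA_DELETION;
    }
    return MergeAction.NONE;
}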

@ -283,6 +283,13 @@ public class PermissionServiceImpl implements PermissionServiceSPI, Initializing
{
return authorityType;
}

@Override
public String toString()
{
return accessStatus + " " + this.permission + " - " +
this.authority + " (" + this.authorityType + ")";
}

@Override
public boolean equals(Object o)

@ -27,19 +27,26 @@ import org.alfresco.repo.security.permissions.PermissionEntry;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;

public class PermissionServiceTest extends AbstractPermissionTest
{
private SimplePermissionEntry denyAndyAll;

private SimplePermissionEntry allowAndyAll;

private SimplePermissionEntry denyAndyRead;

private SimplePermissionEntry allowAndyRead;

private SimplePermissionEntry denyAndyReadProperties;

private SimplePermissionEntry allowAndyReadProperties;

private SimplePermissionEntry allowAndyReadChildren;


public PermissionServiceTest()
{
super();
@ -59,46 +66,80 @@ public class PermissionServiceTest extends AbstractPermissionTest
}
fail("Missing role ROLE_AUTHENTICATED ");
}


@Override
protected void onSetUpInTransaction() throws Exception
{
super.onSetUpInTransaction();
denyAndyAll = new SimplePermissionEntry(
rootNodeRef,
permissionService.getAllPermissionReference(),
"andy",
denyAndyAll = new SimplePermissionEntry(rootNodeRef, permissionService.getAllPermissionReference(), "andy",
AccessStatus.DENIED);
allowAndyAll = new SimplePermissionEntry(
rootNodeRef,
permissionService.getAllPermissionReference(),
"andy",
allowAndyAll = new SimplePermissionEntry(rootNodeRef, permissionService.getAllPermissionReference(), "andy",
AccessStatus.ALLOWED);
denyAndyRead = new SimplePermissionEntry(
rootNodeRef,
getPermission(PermissionService.READ),
"andy",
denyAndyRead = new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.READ), "andy",
AccessStatus.DENIED);
allowAndyRead = new SimplePermissionEntry(
rootNodeRef,
getPermission(PermissionService.READ),
"andy",
AccessStatus.ALLOWED);
denyAndyReadProperties = new SimplePermissionEntry(
rootNodeRef,
getPermission(PermissionService.READ_PROPERTIES),
"andy",
AccessStatus.DENIED);
allowAndyReadProperties = new SimplePermissionEntry(
rootNodeRef,
getPermission(PermissionService.READ_PROPERTIES),
"andy",
AccessStatus.ALLOWED);
allowAndyReadChildren = new SimplePermissionEntry(
rootNodeRef,
getPermission(PermissionService.READ_CHILDREN),
"andy",
allowAndyRead = new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.READ), "andy",
AccessStatus.ALLOWED);
denyAndyReadProperties = new SimplePermissionEntry(rootNodeRef,
getPermission(PermissionService.READ_PROPERTIES), "andy", AccessStatus.DENIED);
allowAndyReadProperties = new SimplePermissionEntry(rootNodeRef,
getPermission(PermissionService.READ_PROPERTIES), "andy", AccessStatus.ALLOWED);
allowAndyReadChildren = new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.READ_CHILDREN),
"andy", AccessStatus.ALLOWED);
}

public void testGetAllSetPermissions()
{
runAs("andy");
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.DELETE),
"andy", AccessStatus.ALLOWED));
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.DELETE),
"GROUP_GREEN", AccessStatus.ALLOWED));
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.READ),
"andy", AccessStatus.ALLOWED));
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef, getPermission(PermissionService.READ),
"GROUP_RED", AccessStatus.ALLOWED));
permissionService.setPermission(new SimplePermissionEntry(systemNodeRef,
getPermission(PermissionService.DELETE), "andy", AccessStatus.DENIED));
permissionService.setPermission(new SimplePermissionEntry(systemNodeRef,
getPermission(PermissionService.DELETE), "GROUP_GREEN", AccessStatus.DENIED));

NodeRef current = systemNodeRef;
Set<AccessPermission> setPermissions = new HashSet<AccessPermission>();
while (current != null)
{
Set<AccessPermission> morePermissions = permissionService.getAllSetPermissions(current);
for (AccessPermission toTest : morePermissions)
{
if (toTest.getAuthorityType() == AuthorityType.GROUP)
{
boolean add = true;
for (AccessPermission existing : setPermissions)
{
if (add
&& existing.getAuthority().equals(toTest.getAuthority())
&& existing.getPermission().equals(toTest.getPermission()))
{
add = false;
}

}
if (add)
{
setPermissions.add(toTest);
}
}
}
if (permissionService.getInheritParentPermissions(current))
{
current = nodeService.getPrimaryParent(current).getParentRef();
}
else
{
current = null;
}
}
assertEquals(2, setPermissions.size());

}

public void testSetInheritFalse()
@ -259,7 +300,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
assertTrue(permissionService.getSetPermissions(rootNodeRef).inheritPermissions());
assertEquals(rootNodeRef, permissionService.getSetPermissions(rootNodeRef).getNodeRef());
assertEquals(1, permissionService.getSetPermissions(rootNodeRef).getPermissionEntries().size());


// remove other-all
permissionService.deletePermission(rootNodeRef, "other", permissionService.getAllPermission());
assertNotNull(permissionService.getSetPermissions(rootNodeRef));
@ -576,7 +617,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
assertEquals(1, permissionService.getAllSetPermissions(rootNodeRef).size());
assertTrue(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
// Changed by not enforcing READ
//assertFalse(permissionService.hasPermission(n1, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
// assertFalse(permissionService.hasPermission(n1, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
assertTrue(permissionService.hasPermission(n1, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
runAs("lemur");
assertFalse(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
@ -850,9 +891,9 @@ public class PermissionServiceTest extends AbstractPermissionTest
{
runAs("admin");

//TransactionService transactionService = serviceRegistry.getTransactionService();
//UserTransaction tx = transactionService.getUserTransaction();
//tx.begin();
// TransactionService transactionService = serviceRegistry.getTransactionService();
// UserTransaction tx = transactionService.getUserTransaction();
// tx.begin();

NodeRef n1 = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{namespace}one"), ContentModel.TYPE_FOLDER).getChildRef();
@ -890,7 +931,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
for (int i = 0; i < 1000; i++)
{
getSession().flush();
//getSession().clear();
// getSession().clear();
start = System.nanoTime();
assertTrue(permissionService.hasPermission(n10, getPermission(PermissionService.READ)) == AccessStatus.ALLOWED);
end = System.nanoTime();
@ -910,7 +951,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
System.out.println("Time is " + (time / 1000000000.0));
// assertTrue((time / 1000000000.0) < 2.0);

//tx.rollback();
// tx.rollback();
}

public void testAllPermissions()
@ -931,7 +972,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
assertFalse(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_CONTENT)) == AccessStatus.ALLOWED);

assertEquals(0, permissionService.getAllSetPermissions(rootNodeRef).size());


permissionService.setPermission(allowAndyAll);
assertEquals(1, permissionService.getAllSetPermissions(rootNodeRef).size());
runAs("andy");
@ -983,7 +1024,7 @@ public class PermissionServiceTest extends AbstractPermissionTest
assertFalse(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_CHILDREN)) == AccessStatus.ALLOWED);
assertFalse(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_CONTENT)) == AccessStatus.ALLOWED);
}


public void testOldAllPermissions()
{
runAs("andy");
@ -1002,7 +1043,8 @@ public class PermissionServiceTest extends AbstractPermissionTest
assertFalse(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ_CONTENT)) == AccessStatus.ALLOWED);

assertEquals(0, permissionService.getAllSetPermissions(rootNodeRef).size());
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef, PermissionServiceImpl.OLD_ALL_PERMISSIONS_REFERENCE, "andy", AccessStatus.ALLOWED));
permissionService.setPermission(new SimplePermissionEntry(rootNodeRef,
PermissionServiceImpl.OLD_ALL_PERMISSIONS_REFERENCE, "andy", AccessStatus.ALLOWED));
assertEquals(1, permissionService.getAllSetPermissions(rootNodeRef).size());
runAs("andy");
assertTrue(permissionService.hasPermission(rootNodeRef, getPermission(PermissionService.READ)) == AccessStatus.ALLOWED);
@ -1423,12 +1465,10 @@ public class PermissionServiceTest extends AbstractPermissionTest

permissionService.setInheritParentPermissions(n2, true);




runAs("andy");
assertFalse(permissionService.hasPermission(n2, getPermission(PermissionService.READ)) == AccessStatus.ALLOWED);
// Changed by removing permission read parents access
//assertFalse(permissionService.hasPermission(n2, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
// assertFalse(permissionService.hasPermission(n2, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
assertTrue(permissionService.hasPermission(n2, getPermission(PermissionService.READ_PROPERTIES)) == AccessStatus.ALLOWED);
assertFalse(permissionService.hasPermission(n2, getPermission(PermissionService.READ_CHILDREN)) == AccessStatus.ALLOWED);
assertTrue(permissionService.hasPermission(n2, getPermission(PermissionService.READ_CONTENT)) == AccessStatus.ALLOWED);
@ -1701,8 +1741,6 @@ public class PermissionServiceTest extends AbstractPermissionTest

}


// TODO: Test permissions on missing nodes



}

@ -802,25 +802,6 @@ public class VersionServiceImpl extends AbstractVersionServiceImpl
}

return result;
}

/**
* Checks the given node for the version aspect. Throws an exception if it is not present.
*
* @param nodeRef the node reference
* @throws AspectMissingException
* the version aspect is not present on the node
*/
private void checkForVersionAspect(NodeRef nodeRef)
throws AspectMissingException
{
QName aspectRef = ContentModel.ASPECT_VERSIONABLE;

if (this.nodeService.hasAspect(nodeRef, aspectRef) == false)
{
// Raise exception to indicate version aspect is not present
throw new AspectMissingException(aspectRef, nodeRef);
}
}

/**
@ -1089,14 +1070,19 @@ public class VersionServiceImpl extends AbstractVersionServiceImpl
public void deleteVersionHistory(NodeRef nodeRef)
throws AspectMissingException
{
// First check that the versionable aspect is present
checkForVersionAspect(nodeRef);

// Get the version history node for the node in question and delete it
NodeRef versionHistoryNodeRef = getVersionHistoryNodeRef(nodeRef);
this.dbNodeService.deleteNode(versionHistoryNodeRef);

// Reset the version label property on the versionable node
this.nodeService.setProperty(nodeRef, ContentModel.PROP_VERSION_LABEL, null);
if (versionHistoryNodeRef != null)
{
// Delete the version history node
this.dbNodeService.deleteNode(versionHistoryNodeRef);

if (this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true)
{
// Reset the version label property on the versionable node
this.nodeService.setProperty(nodeRef, ContentModel.PROP_VERSION_LABEL, null);
}
}
}
}

@ -549,4 +549,64 @@ public class VersionServiceImplTest extends BaseVersionStoreTest
});

}

public void testAddRemoveVersionableAspect()
{
HashMap<QName, Serializable> props2 = new HashMap<QName, Serializable>();
props2.put(ContentModel.PROP_NAME, "test.txt");
final NodeRef nodeRef = this.dbNodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}MyVersionableNode2"),
TEST_TYPE_QNAME,
props2).getChildRef();
this.dbNodeService.addAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE, null);

setComplete();
endTransaction();

TransactionUtil.executeInUserTransaction(this.transactionService, new TransactionUtil.TransactionWork<Object>()
{
public Object doWork() throws Exception
{
// Check that the version history has been created
VersionHistory versionHistory = VersionServiceImplTest.this.versionService.getVersionHistory(nodeRef);
assertNotNull(versionHistory);
assertEquals(1, versionHistory.getAllVersions().size());

// Remove the versionable aspect
VersionServiceImplTest.this.dbNodeService.removeAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE);

return null;
}
});

TransactionUtil.executeInUserTransaction(this.transactionService, new TransactionUtil.TransactionWork<Object>()
{
public Object doWork() throws Exception
{
// Check that the version history has been removed
VersionHistory versionHistory = VersionServiceImplTest.this.versionService.getVersionHistory(nodeRef);
assertNull(versionHistory);

// Re-add the versionable aspect
VersionServiceImplTest.this.dbNodeService.addAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE, null);

return null;
}
});

TransactionUtil.executeInUserTransaction(this.transactionService, new TransactionUtil.TransactionWork<Object>()
{
public Object doWork() throws Exception
{
// Check that the version history has been created
VersionHistory versionHistory = VersionServiceImplTest.this.versionService.getVersionHistory(nodeRef);
assertNotNull(versionHistory);
assertEquals(1, versionHistory.getAllVersions().size());

return null;
}
});
}
}

@ -17,23 +17,23 @@
package org.alfresco.repo.version;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.executer.CreateVersionActionExecuter;
import org.alfresco.repo.content.ContentServicePolicies;
import org.alfresco.repo.node.NodeServicePolicies;
import org.alfresco.repo.policy.Behaviour;
import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.policy.PolicyScope;
import org.alfresco.repo.rule.RuntimeRuleService;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionService;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.rule.Rule;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.service.cmr.version.VersionService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

@ -42,36 +42,27 @@ import org.alfresco.service.namespace.QName;
*
* @author Roy Wetherall
*/
public class VersionableAspect implements ContentServicePolicies.OnContentUpdatePolicy
public class VersionableAspect implements ContentServicePolicies.OnContentUpdatePolicy,
NodeServicePolicies.OnAddAspectPolicy,
NodeServicePolicies.OnRemoveAspectPolicy
{
/**
* The policy component
*/
/** The i18n'ized messages */
private static final String MSG_INITIAL_VERSION = "create_version.initial_version";
private static final String MSG_AUTO_VERSION = "create_version.auto_version";

/** Transaction resource key */
private static final String KEY_INITIAL_VERSION = "initial_version_";

/** The policy component */
private PolicyComponent policyComponent;

/**
* The node service
*/
/** The node service */
private NodeService nodeService;

/**
* The rule service
*/
private RuleService ruleService;

/**
* The action service
*/
private ActionService actionService;

/**
* The rule used to create versions
*/
private Rule rule;
/** The Version service */
private VersionService versionService;

/**
* Auto version behaviour
*/
/** Auto version behaviour */
private Behaviour autoVersionBehaviour;

/**
@ -84,25 +75,15 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
this.policyComponent = policyComponent;
}

/**
* Set the rule service
*
* @param ruleService the rule service
*/
public void setRuleService(RuleService ruleService)
/**
* Set the version service
*
* @param versionService the version service
*/
public void setVersionService(VersionService versionService)
{
this.ruleService = ruleService;
}

/**
* Set the action service
*
* @param actionService the action service
*/
public void setActionService(ActionService actionService)
{
this.actionService = actionService;
}
this.versionService = versionService;
}

/**
* Set the node service
@ -122,8 +103,13 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
this.policyComponent.bindClassBehaviour(
QName.createQName(NamespaceService.ALFRESCO_URI, "onAddAspect"),
ContentModel.ASPECT_VERSIONABLE,
new JavaBehaviour(this, "onAddAspect"));
autoVersionBehaviour = new JavaBehaviour(this, "onContentUpdate");
new JavaBehaviour(this, "onAddAspect", Behaviour.NotificationFrequency.TRANSACTION_COMMIT));
this.policyComponent.bindClassBehaviour(
QName.createQName(NamespaceService.ALFRESCO_URI, "onRemoveAspect"),
ContentModel.ASPECT_VERSIONABLE,
new JavaBehaviour(this, "onRemoveAspect", Behaviour.NotificationFrequency.TRANSACTION_COMMIT));

autoVersionBehaviour = new JavaBehaviour(this, "onContentUpdate", Behaviour.NotificationFrequency.TRANSACTION_COMMIT);
this.policyComponent.bindClassBehaviour(
ContentServicePolicies.ON_CONTENT_UPDATE,
ContentModel.ASPECT_VERSIONABLE,
@ -157,28 +143,7 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
ContentModel.ASPECT_VERSIONABLE,
ContentModel.PROP_AUTO_VERSION,
this.nodeService.getProperty(sourceNodeRef, ContentModel.PROP_AUTO_VERSION));
}

/**
* OnCreateVersion behaviour for the version aspect
* <p>
* Ensures that the version aspect and its properties are 'frozen' as part of
* the versioned state.
*
* @param classRef the class reference
* @param versionableNode the versionable node reference
* @param versionProperties the version properties
* @param nodeDetails the details of the node to be versioned
*/
public void onCreateVersion(
QName classRef,
NodeRef versionableNode,
Map<String, Serializable> versionProperties,
PolicyScope nodeDetails)
{
// Do nothing since we do not want to freeze any of the version
// properties
}
}


/**
@ -187,7 +152,6 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
* @param nodeRef
* @param aspectTypeQName
*/
@SuppressWarnings("unchecked")
public void onAddAspect(NodeRef nodeRef, QName aspectTypeQName)
{
if (aspectTypeQName.equals(ContentModel.ASPECT_VERSIONABLE) == true)
@ -198,15 +162,29 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
{
initialVersion = value.booleanValue();
}
// else this means that the default vlaue has not been set the versionable aspect we applied pre-1.2
// else this means that the default value has not been set the versionable aspect we applied pre-1.2

if (initialVersion == true)
{
// Queue create version action
queueCreateVersionAction(nodeRef);
Map<String, Serializable> versionDetails = new HashMap<String, Serializable>(1);
versionDetails.put(Version.PROP_DESCRIPTION, I18NUtil.getMessage(MSG_INITIAL_VERSION));
this.versionService.createVersion(nodeRef, versionDetails);

// Keep track of the fact that the initial version has been created
AlfrescoTransactionSupport.bindResource(KEY_INITIAL_VERSION + nodeRef.toString(), nodeRef);
}
}
}

/**
* @see org.alfresco.repo.node.NodeServicePolicies.OnRemoveAspectPolicy#onRemoveAspect(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName)
*/
public void onRemoveAspect(NodeRef nodeRef, QName aspectTypeQName)
{
// When the versionable aspect is removed from a node, then delete the associated version history
this.versionService.deleteVersionHistory(nodeRef);
}

/**
* On content update policy behaviour
@ -217,21 +195,27 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
{
if (this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true)
{
// Determine whether the node is auto versionable or not
boolean autoVersion = false;
Boolean value = (Boolean)this.nodeService.getProperty(nodeRef, ContentModel.PROP_AUTO_VERSION);
if (value != null)
{
// If the value is not null then
autoVersion = value.booleanValue();
}
// else this means that the default value has not been set and the versionable aspect was applied pre-1.1

if (autoVersion == true)
{
// Queue create version action
queueCreateVersionAction(nodeRef);
}
// Determine whether we have already created an initial version during this transaction
if (AlfrescoTransactionSupport.getResource(KEY_INITIAL_VERSION + nodeRef.toString()) == null)
{
// Determine whether the node is auto versionable or not
boolean autoVersion = false;
Boolean value = (Boolean)this.nodeService.getProperty(nodeRef, ContentModel.PROP_AUTO_VERSION);
if (value != null)
{
// If the value is not null then
autoVersion = value.booleanValue();
}
// else this means that the default value has not been set and the versionable aspect was applied pre-1.1

if (autoVersion == true)
{
// Create the auto-version
Map<String, Serializable> versionProperties = new HashMap<String, Serializable>(1);
versionProperties.put(Version.PROP_DESCRIPTION, I18NUtil.getMessage(MSG_AUTO_VERSION));
this.versionService.createVersion(nodeRef, versionProperties);
}
}
}
}

@ -251,24 +235,5 @@ public class VersionableAspect implements ContentServicePolicies.OnContentUpdate
public void disableAutoVersion()
{
this.autoVersionBehaviour.disable();
}

/**
* Queue create version action
*
* @param nodeRef the node reference
*/
private void queueCreateVersionAction(NodeRef nodeRef)
{
if (this.rule == null)
{
// Create the version action rule
this.rule = this.ruleService.createRule("inbound");
Action action = this.actionService.createAction(CreateVersionActionExecuter.NAME);
this.rule.addAction(action);
}

// Stash the rule pending execution at the end of the transaction
((RuntimeRuleService)this.ruleService).addRulePendingExecution(nodeRef, nodeRef, this.rule, true);
}
}
}

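The reworked VersionableAspect above binds its policies at TRANSACTION_COMMIT and calls VersionService directly instead of queueing a rule, using a transaction-scoped resource to stop onContentUpdate from auto-versioning a node whose initial version was just created in the same transaction. A minimal sketch of that guard pattern, assuming the constants and services shown in the diff (the helper methods themselves are illustrative, not part of the class):

// Illustrative helpers only: the guard is keyed on KEY_INITIAL_VERSION + nodeRef.
private void createInitialVersion(NodeRef nodeRef)
{
    Map<String, Serializable> versionDetails = new HashMap<String, Serializable>(1);
    versionDetails.put(Version.PROP_DESCRIPTION, I18NUtil.getMessage(MSG_INITIAL_VERSION));
    this.versionService.createVersion(nodeRef, versionDetails);
    // Mark, within the current transaction only, that this node already has its initial version
    AlfrescoTransactionSupport.bindResource(KEY_INITIAL_VERSION + nodeRef.toString(), nodeRef);
}

private boolean initialVersionCreatedInThisTransaction(NodeRef nodeRef)
{
    // onContentUpdate checks this before creating an auto-version
    return AlfrescoTransactionSupport.getResource(KEY_INITIAL_VERSION + nodeRef.toString()) != null;
}
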
@ -0,0 +1,53 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow.jbpm;

import org.jbpm.graph.def.ActionHandler;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.access.BeanFactoryLocator;
import org.springframework.beans.factory.access.BeanFactoryReference;
import org.springmodules.workflow.jbpm31.JbpmFactoryLocator;


/**
* Abstract base implementation of a Jbpm Action Handler with access to
* Alfresco Spring beans.
*
* @author davidc
*/
public abstract class JBPMSpringActionHandler implements ActionHandler
{

/**
* Construct
*/
protected JBPMSpringActionHandler()
{
// The following implementation is derived from Spring Modules v0.4
BeanFactoryLocator factoryLocator = new JbpmFactoryLocator();
BeanFactoryReference factory = factoryLocator.useBeanFactory(null);
initialiseHandler(factory.getFactory());
}

/**
* Initialise Action Handler
*
* @param factory Spring bean factory for accessing Alfresco beans
*/
protected abstract void initialiseHandler(BeanFactory factory);

}
201 source/java/org/alfresco/repo/workflow/jbpm/JBPMSpringTest.java Normal file
@ -0,0 +1,201 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow.jbpm;

import java.util.List;

import org.alfresco.service.descriptor.DescriptorService;
import org.alfresco.util.BaseSpringTest;
import org.jbpm.JbpmContext;
import org.jbpm.db.GraphSession;
import org.jbpm.graph.def.ProcessDefinition;
import org.jbpm.graph.exe.ProcessInstance;
import org.jbpm.graph.exe.Token;
import org.springmodules.workflow.jbpm31.JbpmCallback;
import org.springmodules.workflow.jbpm31.JbpmTemplate;


/**
* Test Usage of jBPM within Alfresco Spring Context
*
* @author davidc
*/
public class JBPMSpringTest extends BaseSpringTest
{
JbpmTemplate jbpmTemplate;
DescriptorService descriptorService;


//@Override
protected void xonSetUpInTransaction() throws Exception
{
jbpmTemplate = (JbpmTemplate)applicationContext.getBean("jbpm.template");
descriptorService = (DescriptorService)applicationContext.getBean("DescriptorService");
}

public void testDummy()
{
}

public void xtestHelloWorld()
throws Exception
{
// Between the 3 method calls below, all data is passed via the
// database. Here, in this unit test, these 3 methods are executed
// right after each other because we want to test a complete process
// scenario. But in reality, these methods represent different
// requests to a server.

// Since we start with a clean, empty in-memory database, we have to
// deploy the process first. In reality, this is done once by the
// process developer.
deployProcessDefinition();

// Suppose we want to start a process instance (=process execution)
// when a user submits a form in a web application...
processInstanceIsCreatedWhenUserSubmitsWebappForm();

// Then, later, upon the arrival of an asynchronous message the
// execution must continue.
theProcessInstanceContinuesWhenAnAsyncMessageIsReceived();
}

public void xtestStep0()
throws Exception
{
deployProcessDefinition();
setComplete();
}

public void xtestStep1()
throws Exception
{
processInstanceIsCreatedWhenUserSubmitsWebappForm();
setComplete();
}

public void xtestStep2()
throws Exception
{
theProcessInstanceContinuesWhenAnAsyncMessageIsReceived();
setComplete();
}

private void deployProcessDefinition()
{
// This test shows a process definition and one execution
// of the process definition. The process definition has
// 3 nodes: an unnamed start-state, a state 's' and an
// end-state named 'end'.
final ProcessDefinition processDefinition = ProcessDefinition.parseXmlString(
"<process-definition name='hello world'>" +
" <start-state name='start'>" +
" <transition to='s' />" +
" </start-state>" +
" <node name='s'>" +
" <action class='org.alfresco.repo.workflow.jbpm.JBPMTestSpringActionHandler' config-type='bean'>" +
" <value>a test value</value>" +
" </action>" +
" <transition to='end' />" +
" </node>" +
" <end-state name='end' />" +
"</process-definition>"
);


jbpmTemplate.execute(new JbpmCallback()
{
public Object doInJbpm(JbpmContext context)
{
context.deployProcessDefinition(processDefinition);
return null;
}
});
}

private void processInstanceIsCreatedWhenUserSubmitsWebappForm()
{
jbpmTemplate.execute(new JbpmCallback()
{
public Object doInJbpm(JbpmContext context)
{
GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.findLatestProcessDefinition("hello world");

// With the processDefinition that we retrieved from the database, we
// can create an execution of the process definition just like in the
// hello world example (which was without persistence).
ProcessInstance processInstance = new ProcessInstance(processDefinition);

Token token = processInstance.getRootToken();
assertEquals("start", token.getNode().getName());
// Let's start the process execution
token.signal();
// Now the process is in the state 's'.
assertEquals("s", token.getNode().getName());
// Spring based action has been called, check the result by looking at the
// process variable set by the action
String result = "Repo: " + descriptorService.getServerDescriptor().getVersion() + ", Value: a test value, Node: s, Token: /";
assertEquals(result, processInstance.getContextInstance().getVariable("jbpm.test.action.result"));

context.save(processInstance);
return null;
}
});
}

private void theProcessInstanceContinuesWhenAnAsyncMessageIsReceived()
{
jbpmTemplate.execute(new JbpmCallback()
{
public Object doInJbpm(JbpmContext context)
{
GraphSession graphSession = context.getGraphSession();

// First, we need to get the process instance back out of the database.
// There are several options to know what process instance we are dealing
// with here. The easiest in this simple test case is just to look for
// the full list of process instances. That should give us only one
// result. So let's look up the process definition.
ProcessDefinition processDefinition = graphSession.findLatestProcessDefinition("hello world");

// Now, we search for all process instances of this process definition.
List processInstances = graphSession.findProcessInstances(processDefinition.getId());

// Because we know that in the context of this unit test, there is
// only one execution. In real life, the processInstanceId can be
// extracted from the content of the message that arrived or from
// the user making a choice.
ProcessInstance processInstance = (ProcessInstance) processInstances.get(0);

// Now we can continue the execution. Note that the processInstance
// delegates signals to the main path of execution (=the root token).
processInstance.signal();

// After this signal, we know the process execution should have
// arrived in the end-state.
assertTrue(processInstance.hasEnded());

// Now we can update the state of the execution in the database
context.save(processInstance);
return null;
}
});
}

}

@ -0,0 +1,63 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow.jbpm;

import org.alfresco.service.descriptor.DescriptorService;
import org.jbpm.graph.exe.ExecutionContext;
import org.springframework.beans.factory.BeanFactory;


/**
* Test Spring based Jbpm Action Handler
*
* @author davidc
*/
public class JBPMTestSpringActionHandler extends JBPMSpringActionHandler
{
private static final long serialVersionUID = -7659883022289711381L;

private DescriptorService descriptorService;
private String value;


/**
* Setter accessible from jBPM jPDL
* @param value
*/
public void setValue(String value)
{
this.value = value;
}

/*
* (non-Javadoc)
* @see org.jbpm.graph.def.ActionHandler#execute(org.jbpm.graph.exe.ExecutionContext)
*/
public void execute(ExecutionContext arg0) throws Exception
{
String result = "Repo: " + descriptorService.getServerDescriptor().getVersion();
result += ", Value: " + value + ", Node: " + arg0.getNode().getName() + ", Token: " + arg0.getToken().getFullName();
arg0.getContextInstance().setVariable("jbpm.test.action.result", result);
}

@Override
protected void initialiseHandler(BeanFactory factory)
{
descriptorService = (DescriptorService)factory.getBean("DescriptorService", DescriptorService.class);
}

}

@ -18,6 +18,8 @@ package org.alfresco.service.namespace;

import java.util.Collection;

import org.alfresco.service.Auditable;

/**
* The <code>NamespacePrefixResolver</code> provides a mapping between
* namespace prefixes and namespace URIs.
@ -33,6 +35,7 @@ public interface NamespacePrefixResolver
* @return the namespace
* @throws NamespaceException if prefix has not been registered
*/
@Auditable(parameters = {"prefix"})
public String getNamespaceURI(String prefix)
throws NamespaceException;

@ -43,6 +46,7 @@ public interface NamespacePrefixResolver
* @return the prefixes (or empty collection, if no prefixes registered against URI)
* @throws NamespaceException if URI has not been registered
*/
@Auditable(parameters = {"namespaceURI"})
public Collection<String> getPrefixes(String namespaceURI)
throws NamespaceException;

@ -51,6 +55,7 @@ public interface NamespacePrefixResolver
*
* @return collection of all registered namespace prefixes
*/
@Auditable
Collection<String> getPrefixes();

/**
@ -58,6 +63,7 @@ public interface NamespacePrefixResolver
*
* @return collection of all registered namespace uris
*/
@Auditable
Collection<String> getURIs();

}

@ -409,7 +409,7 @@ public class Import extends Tool
throw new ToolArgumentException("Source directory " + fileSourceDir.getAbsolutePath() + " does not exist.");
}
}
if (packageName.endsWith(".acp"))
if (packageName.endsWith(".acp") || packageName.endsWith(".zip"))
{
File packageFile = new File(getSourceDir(), packageName);
if (!packageFile.exists())

@ -34,7 +34,7 @@ public class SearchLanguageConversion
* <li>Reserved: \%_</li>
* </ul>
*/
public static LanguageDefinition DEF_XPATH_LIKE = new SimpleLanguageDef('\\', "%", "_", "\\%_");
public static LanguageDefinition DEF_XPATH_LIKE = new SimpleLanguageDef('\\', "%", "_", "\\%_[]");
/**
* Regular expression query language summary:
* <ul>

@ -42,7 +42,7 @@ public class SearchLanguageConversionTest extends TestCase
assertEquals("Escaping for xpath failed",
"\\\\ | ! \" £ " +
"$ \\% ^ & * ( " +
") \\_ { } [ ] " +
") \\_ { } \\[ \\] " +
"@ # ~ ' : ; " +
", . < > + ? " +
"/ \\\\\\\\ \\\\* \\\\? \\\\\\_",
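
The last two hunks add '[' and ']' to the reserved characters of the XPath LIKE language definition, so they are now escaped in query terms just as '%' and '_' are, and the test's expected string is updated to match. A minimal sketch of the character-by-character escaping this implies (illustrative only; the actual SearchLanguageConversion implementation and SimpleLanguageDef internals may differ):

// Prefix each reserved character with the language's escape character.
static String escapeForLike(String term, char escapeChar, String reserved)
{
    StringBuilder sb = new StringBuilder(term.length() * 2);
    for (char c : term.toCharArray())
    {
        if (reserved.indexOf(c) != -1)
        {
            sb.append(escapeChar);
        }
        sb.append(c);
    }
    return sb.toString();
}

// With the updated DEF_XPATH_LIKE (escape char '\\', reserved "\\%_[]"):
// escapeForLike("a[1]_b", '\\', "\\%_[]") yields the string a\[1\]\_b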