Merged V1.4 to HEAD

svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@3925 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@3965 .


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@3966 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley 2006-09-29 07:45:33 +00:00
parent d4947ef511
commit cf29ca2343
51 changed files with 2076 additions and 1985 deletions

View File

@ -285,6 +285,9 @@
<property name="serviceRegistry"> <property name="serviceRegistry">
<ref bean="ServiceRegistry"></ref> <ref bean="ServiceRegistry"></ref>
</property> </property>
<property name="headerEncoding">
<value>${mail.header}</value>
</property>
</bean> </bean>
<bean id="set-property-value" class="org.alfresco.repo.action.executer.SetPropertyValueActionExecuter" parent="action-executer"> <bean id="set-property-value" class="org.alfresco.repo.action.executer.SetPropertyValueActionExecuter" parent="action-executer">

View File

@ -2,24 +2,12 @@
     xmlns:cm="http://www.alfresco.org/model/content/1.0"
     xmlns:app="http://www.alfresco.org/model/application/1.0">
-    <!--
-    <cm:folder>
-        <app:uifacets />
-        <cm:name>${tutorial.space.name}</cm:name>
-        <cm:description>${tutorial.space.description}</cm:description>
-        <app:icon>space-icon-doc</app:icon>
-        <cm:contains>
-    -->
     <cm:content>
         <app:uifacets />
         <cm:name>${tutorial.document.name}</cm:name>
         <cm:title>${tutorial.document.title}</cm:title>
         <cm:description>${tutorial.document.description}</cm:description>
-        <cm:content>contentUrl=classpath:alfresco/bootstrap/${tutorial.document.name}|mimetype=application/pdf|size=|encoding=</cm:content>
+        <cm:content>contentUrl=classpath:alfresco/bootstrap/Alfresco-Tutorial.pdf|mimetype=application/pdf|size=|encoding=</cm:content>
     </cm:content>
-    <!--
-    </cm:contains>
-    </cm:folder>
-    -->
 </view:view>

View File

@ -0,0 +1,38 @@
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>
<beans>
<!-- Forces the reindexing of nodes where content may have been missing before -->
<!--
This component can be triggered at intervals where asynchronous content sharing
between clustered servers has been set up.
-->
<bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="missingContentReindexComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<!-- Give the server 5 minutes and then check for missing content -->
<property name="startDelayMinutes">
<value>5</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
</bean>
</beans>
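The trigger above hands the reindex component to Quartz through the JobDetailBean's jobDataAsMap; the job then retrieves it by key at execution time. IndexRecoveryJob itself is not shown on this page, so the following is only a plausible sketch, assuming the Quartz 1.x Job API and the IndexRecovery interface used elsewhere in this commit:

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

// Hypothetical shape of the job class named in the config above:
// fetch the component stored under the jobDataAsMap key and run it.
public class IndexRecoveryJobSketch implements Job
{
    public void execute(JobExecutionContext context) throws JobExecutionException
    {
        Object component = context.getJobDetail().getJobDataMap().get("indexRecoveryComponent");
        if (component == null)
        {
            throw new JobExecutionException("Missing job data: indexRecoveryComponent");
        }
        ((IndexRecovery) component).reindex();
    }
}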

View File

@ -4,10 +4,10 @@
 <beans>
     <bean id="indexRecoveryComponentBase" abstract="true" >
-        <property name="sessionFactory">
-            <ref bean="sessionFactory" />
+        <property name="authenticationComponent">
+            <ref bean="authenticationComponentImpl" />
         </property>
-        <property name="transactionService">
+        <property name="transactionComponent">
             <ref bean="transactionComponent" />
         </property>
         <property name="indexer">
@ -22,62 +22,33 @@
         <property name="nodeService">
             <ref bean="nodeService" />
         </property>
+        <property name="nodeDaoService">
+            <ref bean="nodeDaoService" />
+        </property>
     </bean>
-    <!-- full node index recovery -->
-    <bean id="indexRecoveryComponent" class="org.alfresco.repo.node.index.FullIndexRecoveryComponent" parent="indexRecoveryComponentBase">
-        <!-- Properties controlling full index rebuilding / tracking -->
-        <property name="executeFullRecovery">
-            <value>false</value>  <!-- enable this to start the full index recovery -->
-        </property>
-        <property name="runContinuously">
-            <value>false</value>  <!-- ensure the index is up to date and then stop -->
-        </property>
-        <property name="waitTime">
-            <value>1000</value>  <!-- milliseconds to wait between checks for new transactions -->
-        </property>
-        <property name="l2CacheMode">
-            <value>NORMAL</value>  <!-- normal L2 cache usage (database is changed by this server only) -->
-        </property>
-    </bean>
-    <!-- attempt to reindex content that was missing before -->
-    <!--
-    <bean id="missingContentReindexComponent" class="org.alfresco.repo.node.index.MissingContentReindexComponent" parent="indexRecoveryComponentBase">
-        <property name="runContinuously">
-            <value>false</value>
-        </property>
-        <property name="waitTime">
-            <value>300000</value>
-        </property>
-    </bean>
-    <bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
-        <property name="jobDetail">
-            <bean id="IndexRecoveryJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
-                <property name="jobClass">
-                    <value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
-                </property>
-                <property name="jobDataAsMap">
-                    <map>
-                        <entry key="missingContentReindexComponent">
-                            <ref bean="missingContentReindexComponent" />
-                        </entry>
-                    </map>
-                </property>
-            </bean>
-        </property>
-        <property name="startDelay">
-            <value>60000</value>
-        </property>
-        <property name="repeatCount">
-            <value>0</value>
-        </property>
-        <property name="scheduler">
-            <ref bean="schedulerFactory" />
-        </property>
-    </bean>
-    -->
+    <!-- index recovery and validation -->
+    <!--
+        Recovery types are:
+            NONE:      Ignore
+            VALIDATE:  Checks that the last transaction for each store is represented in the indexes
+            AUTO:      Validates and auto-recovers if validation fails
+            FULL:      Full index rebuild, processing all transactions in order.  The server is temporarily suspended.
+    -->
+    <bean
+            id="indexRecoveryComponent"
+            class="org.alfresco.repo.node.index.FullIndexRecoveryComponent"
+            parent="indexRecoveryComponentBase">
+        <property name="recoveryMode">
+            <value>${index.recovery.mode}</value>
+        </property>
+    </bean>
+    <!-- Bean that attempts to index content that was previously missing -->
+    <bean
+            id="missingContentReindexComponent"
+            class="org.alfresco.repo.node.index.MissingContentReindexComponent"
+            parent="indexRecoveryComponentBase">
+    </bean>
 </beans>

View File

@ -10,14 +10,14 @@ bpm_businessprocessmodel.transition.description=Task Done
 # Base Task
 bpm_businessprocessmodel.type.bpm_task.title=Task
 bpm_businessprocessmodel.type.bpm_task.description=Task
-bpm_businessprocessmodel.property.bpm_taskId.title=Task Identifier
-bpm_businessprocessmodel.property.bpm_taskId.description=Task Identifier
-bpm_businessprocessmodel.property.bpm_startDate.title=Task Start Date
-bpm_businessprocessmodel.property.bpm_startDate.description=Task Start Date
-bpm_businessprocessmodel.property.bpm_completionDate.title=Task Completion Date
-bpm_businessprocessmodel.property.bpm_completionDate.description=Task Completion Date
-bpm_businessprocessmodel.property.bpm_dueDate.title=Task Due Date
-bpm_businessprocessmodel.property.bpm_dueDate.description=Task Due Date
+bpm_businessprocessmodel.property.bpm_taskId.title=Identifier
+bpm_businessprocessmodel.property.bpm_taskId.description=Identifier
+bpm_businessprocessmodel.property.bpm_startDate.title=Start Date
+bpm_businessprocessmodel.property.bpm_startDate.description=Start Date
+bpm_businessprocessmodel.property.bpm_completionDate.title=Completion Date
+bpm_businessprocessmodel.property.bpm_completionDate.description=Completion Date
+bpm_businessprocessmodel.property.bpm_dueDate.title=Due Date
+bpm_businessprocessmodel.property.bpm_dueDate.description=Due Date
 bpm_businessprocessmodel.property.bpm_status.title=Status
 bpm_businessprocessmodel.property.bpm_status.description=Status
 bpm_businessprocessmodel.property.bpm_priority.title=Priority
@ -36,9 +36,9 @@ bpm_businessprocessmodel.property.bpm_workflowInstanceId.title=Workflow Instance
 bpm_businessprocessmodel.property.bpm_workflowInstanceId.description=Workflow Instance Id
 bpm_businessprocessmodel.property.bpm_context.title=Task Context
 bpm_businessprocessmodel.property.bpm_context.description=The context within which this task has been assigned
-bpm_businessprocessmodel.property.bpm_description.title=Task Description
+bpm_businessprocessmodel.property.bpm_description.title=Description
 bpm_businessprocessmodel.property.bpm_description.description=Description of what needs to be achieved
-bpm_businessprocessmodel.property.bpm_outcome.title=Task Outcome
+bpm_businessprocessmodel.property.bpm_outcome.title=Outcome
 bpm_businessprocessmodel.property.bpm_outcome.description=Decision made on completing Task
 bpm_businessprocessmodel.property.bpm_completedItems.title=Completed Items
 bpm_businessprocessmodel.property.bpm_completedItems.description=Package items marked as complete

View File

@ -2,3 +2,9 @@
 content.content_missing=The node''s content is missing: \n node: {0} \n reader: {1} \n Please contact your system administrator.
 content.runtime_exec.property_moved=The property ''errorCodes'' has moved down onto the RuntimeExec class
+index.recovery.store_not_up_to_date=The indexes for store ''{0}'' are not synchronized with the database.
+index.recovery.starting=Index recovery started: {0} transactions.
+index.recovery.complete=Index recovery completed.
+index.recovery.progress=\t{0} % complete.
+index.recovery.terminated=Index recovery terminated.
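These bundle entries are java.text.MessageFormat patterns: {0} is a positional argument, and the doubled apostrophes ('') are MessageFormat's escape for a literal quote. A self-contained illustration using only the JDK:

import java.text.MessageFormat;

public class RecoveryMessageDemo
{
    public static void main(String[] args)
    {
        // the pattern as it would be read from the bundle above
        String pattern = "The indexes for store ''{0}'' are not synchronized with the database.";
        System.out.println(MessageFormat.format(pattern, "workspace://SpacesStore"));
        // prints: The indexes for store 'workspace://SpacesStore' are not synchronized with the database.
    }
}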

View File

@ -291,6 +291,11 @@
<aspect name="bpm:workflowPackage"> <aspect name="bpm:workflowPackage">
<properties> <properties>
<!-- Created by Workflow Service (true), or provided from outside (false) -->
<property name="bpm:isSystemPackage">
<type>d:boolean</type>
</property>
<!-- --> <!-- -->
<!-- Associated Workflow --> <!-- Associated Workflow -->
<!-- --> <!-- -->

View File

@ -1018,7 +1018,7 @@
<idref local="WorkflowService_transaction"/> <idref local="WorkflowService_transaction"/>
<idref local="AuditMethodInterceptor"/> <idref local="AuditMethodInterceptor"/>
<idref local="exceptionTranslator"/> <idref local="exceptionTranslator"/>
<idref bean="WorkflowService_security"/> <idref bean="WorkflowService_security"/>
<idref local="WorkflowService_descriptor"/> <idref local="WorkflowService_descriptor"/>
</list> </list>
</property> </property>

View File

@ -8,13 +8,14 @@ dir.contentstore.deleted=${dir.root}/contentstore.deleted
 dir.auditcontentstore=${dir.root}/audit.contentstore
 # The location for lucene index files
 dir.indexes=${dir.root}/lucene-indexes
 # The location for lucene index locks
 dir.indexes.lock=${dir.indexes}/locks
+# The index recovery mode (NONE, VALIDATE, AUTO, FULL)
+index.recovery.mode=VALIDATE
 # #################### #
 # Lucene configuration #
 # #################### #
@ -74,6 +75,8 @@ mail.username=anonymous
 mail.password=
 # Set this value to UTF-8 or similar for encoding of email messages as required
 mail.encoding=UTF-8
+# Set this value to 7bit or similar for Asian encoding of email headers as required
+mail.header=
 # System Configuration
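index.recovery.mode feeds the recoveryMode property of indexRecoveryComponent (see the context file earlier in this diff), where it is converted with RecoveryMode.valueOf(...) in the FullIndexRecoveryComponent shown at the end of this commit. A small sketch of that conversion and its failure mode, using a stand-in enum that mirrors the one added below:

// Stand-in for the RecoveryMode enum added in FullIndexRecoveryComponent.
enum RecoveryMode { NONE, VALIDATE, AUTO, FULL }

public class RecoveryModeDemo
{
    public static void main(String[] args)
    {
        // "VALIDATE" is the default shipped in repository.properties
        System.out.println(RecoveryMode.valueOf("VALIDATE"));   // VALIDATE

        try
        {
            RecoveryMode.valueOf("validate");   // valueOf is case-sensitive
        }
        catch (IllegalArgumentException e)
        {
            // a misconfigured mode fails fast at startup
            System.out.println("rejected: " + e.getMessage());
        }
    }
}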

View File

@ -2,9 +2,25 @@
 <!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>
 <beans>
-    <!--                        -->
-    <!-- Scheduled jobs         -->
-    <!--                        -->
+    <!-- Task scheduler -->
+    <!-- Triggers should not appear here - the scheduler should be injected into the trigger definition -->
+    <!-- This bean should not need to apear else where in extension configuration -->
+    <bean id="schedulerFactory" class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
+        <property name="waitForJobsToCompleteOnShutdown">
+            <value>true</value>
+        </property>
+        <property name="configLocation">
+            <value>classpath:alfresco/domain/quartz.properties</value>
+        </property>
+        <property name="schedulerName">
+            <value>DefaultScheduler</value>
+        </property>
+    </bean>
+    <!--                        -->
+    <!-- Scheduled tasks        -->
+    <!--                        -->
     <bean id="ftsIndexerTrigger" class="org.alfresco.util.TriggerBean">
         <property name="jobDetail">
@ -21,16 +37,74 @@
             </property>
         </bean>
     </property>
-    <!-- Try and do some indexing every minute after starting up -->
-    <property name="startDelay">
-        <value>60000</value>
-    </property>
-    <property name="repeatInterval">
-        <value>60000</value>
-    </property>
     <property name="scheduler">
         <ref bean="schedulerFactory" />
     </property>
+    <!-- Try and do some indexing every minute after starting up -->
+    <property name="startDelayMinutes">
+        <value>1</value>
+    </property>
+    <property name="repeatIntervalMinutes">
+        <value>1</value>
+    </property>
 </bean>
+<!-- Forces the reindexing of nodes where content may have been missing before -->
+<!--
+    This component can be triggered at intervals where asynchronous content sharing
+    between clustered servers has been set up
+-->
+<bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
+    <property name="jobDetail">
+        <bean class="org.springframework.scheduling.quartz.JobDetailBean">
+            <property name="jobClass">
+                <value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
+            </property>
+            <property name="jobDataAsMap">
+                <map>
+                    <entry key="indexRecoveryComponent">
+                        <ref bean="missingContentReindexComponent" />
+                    </entry>
+                </map>
+            </property>
+        </bean>
+    </property>
+    <property name="scheduler">
+        <ref bean="schedulerFactory" />
+    </property>
+    <!-- Give the server 5 minutes and then check for missing content -->
+    <property name="startDelayMinutes">
+        <value>5</value>
+    </property>
+    <property name="repeatCount">
+        <value>0</value>
+    </property>
+</bean>
+<bean id="indexRecoveryTrigger" class="org.alfresco.util.TriggerBean">
+    <property name="jobDetail">
+        <bean class="org.springframework.scheduling.quartz.JobDetailBean">
+            <property name="jobClass">
+                <value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
+            </property>
+            <property name="jobDataAsMap">
+                <map>
+                    <entry key="indexRecoveryComponent">
+                        <ref bean="indexRecoveryComponent" />
+                    </entry>
+                </map>
+            </property>
+        </bean>
+    </property>
+    <property name="scheduler">
+        <ref bean="schedulerFactory" />
+    </property>
+    <property name="startDelayMinutes">
+        <value>1</value>
+    </property>
+    <property name="repeatCount">
+        <value>0</value>
+    </property>
+</bean>
 <bean id="tempFileCleanerTrigger" class="org.alfresco.util.TriggerBean">
@ -48,15 +122,16 @@
             </property>
         </bean>
     </property>
-    <property name="startDelay">
-        <value>1800000</value><!-- start after half an hour -->
-    </property>
-    <property name="repeatInterval">
-        <value>3600000</value><!-- repeat every hour -->
-    </property>
     <property name="scheduler">
         <ref bean="schedulerFactory" />
     </property>
+    <!-- start after half an hour and repeat hourly -->
+    <property name="startDelayMinutes">
+        <value>30</value>
+    </property>
+    <property name="repeatIntervalMinutes">
+        <value>60</value>
+    </property>
 </bean>
 <bean id="fileContentStoreCleanerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
@ -84,33 +159,6 @@
         </property>
     </bean>
-    <bean id="indexRecoveryTrigger" class="org.alfresco.util.TriggerBean">
-        <property name="jobDetail">
-            <bean id="IndexRecoveryJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
-                <property name="jobClass">
-                    <value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
-                </property>
-                <property name="jobDataAsMap">
-                    <map>
-                        <entry key="indexRecoveryComponent">
-                            <ref bean="indexRecoveryComponent" />
-                        </entry>
-                    </map>
-                </property>
-            </bean>
-        </property>
-        <property name="startDelay">
-            <value>60000</value> <!-- start after 1 minute -->
-        </property>
-        <property name="repeatCount">
-            <value>0</value> <!-- DO NOT REPEAT !!!!! -->
-        </property>
-        <property name="scheduler">
-            <ref bean="schedulerFactory" />
-        </property>
-    </bean>
     <bean id="indexBackupJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
         <property name="jobClass">
             <value>org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2$LuceneIndexBackupJob</value>
@ -150,26 +198,12 @@
<ref bean="schedulerFactory" /> <ref bean="schedulerFactory" />
</property> </property>
--> -->
<property name="startDelay"> <!-- start after an hour and repeat hourly -->
<value>3600000</value><!-- start after an hour --> <property name="startDelayMinutes">
<value>60</value>
</property> </property>
<property name="repeatInterval"> <property name="repeatIntervalMinutes">
<value>3600000</value><!-- repeat every hour --> <value>60</value>
</property>
</bean>
<!-- Scheduled tasks -->
<!-- Triggers should not appear here - the scheduler should be injected into the trigger definition -->
<!-- This bean should not need to apear else where in extension configuration -->
<bean id="schedulerFactory" class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="waitForJobsToCompleteOnShutdown">
<value>true</value>
</property>
<property name="configLocation">
<value>classpath:alfresco/domain/quartz.properties</value>
</property>
<property name="schedulerName">
<value>DefaultScheduler</value>
</property> </property>
</bean> </bean>
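The recurring change in this file is the move from raw millisecond properties (startDelay, repeatInterval) to minute-based ones (startDelayMinutes, repeatIntervalMinutes). TriggerBean's implementation is not part of this page, but the values imply a fixed conversion, e.g. 30 minutes for the old 1800000 ms. A trivial sketch with hypothetical setters matching the config names:

// Hypothetical fragment of a TriggerBean-like class: the minute-based
// setters are a readability veneer over the millisecond values that
// Quartz ultimately needs (1 minute = 60,000 ms).
public class TriggerBeanSketch
{
    private long startDelay;        // milliseconds
    private long repeatInterval;    // milliseconds

    public void setStartDelayMinutes(long minutes)
    {
        this.startDelay = minutes * 60L * 1000L;        // 30 -> 1800000
    }

    public void setRepeatIntervalMinutes(long minutes)
    {
        this.repeatInterval = minutes * 60L * 1000L;    // 60 -> 3600000
    }
}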

View File

@ -5,17 +5,7 @@
<swimlane name="initiator"/> <swimlane name="initiator"/>
<start-state name="start"> <start-state name="start">
<task name="wf:submitAdhocTask" swimlane="initiator"> <task name="wf:submitAdhocTask" swimlane="initiator"/>
<controller>
<variable name="bpm_assignee" access="write"/>
<variable name="bpm_workflowDescription" access="write"/>
<variable name="bpm_workflowDueDate" access="write"/>
<variable name="bpm_workflowPriority" access="write"/>
<variable name="bpm_package" access="write"/>
<variable name="bpm_context" access="write"/>
<variable name="wf_notifyMe" access="write"/>
</controller>
</task>
<transition name="" to="adhoc"/> <transition name="" to="adhoc"/>
</start-state> </start-state>

View File

@ -2,55 +2,46 @@
 <process-definition xmlns="urn:jbpm.org:jpdl-3.1" name="wf:review">
-    <swimlane name="initiator"/>
+    <swimlane name="initiator" />
     <start-state name="start">
-        <task name="wf:submitReviewTask" swimlane="initiator">
-            <controller>
-                <variable name="bpm_assignee" access="write"/>
-                <variable name="bpm_workflowDescription" access="write"/>
-                <variable name="bpm_workflowDueDate" access="write"/>
-                <variable name="bpm_workflowPriority" access="write"/>
-                <variable name="bpm_package" access="write"/>
-                <variable name="bpm_context" access="write"/>
-            </controller>
-        </task>
-        <transition name="" to="review"/>
+        <task name="wf:submitReviewTask" swimlane="initiator" />
+        <transition name="" to="review" />
     </start-state>
     <swimlane name="reviewer">
-        <assignment actor-id="#{bpm_assignee.properties['cm:userName']}"/>
+        <assignment actor-id="#{bpm_assignee.properties['cm:userName']}" />
     </swimlane>
     <task-node name="review">
         <task name="wf:reviewTask" swimlane="reviewer">
             <event type="task-create">
                 <script>
                     if (bpm_workflowDueDate != void)
                     {
                         taskInstance.dueDate = bpm_workflowDueDate;
                     }
                     if (bpm_workflowPriority != void)
                     {
                         taskInstance.priority = bpm_workflowPriority;
                     }
                 </script>
             </event>
         </task>
-        <transition name="reject" to="rejected"/>
-        <transition name="approve" to="approved"/>
+        <transition name="reject" to="rejected" />
+        <transition name="approve" to="approved" />
     </task-node>
     <task-node name="rejected">
-        <task name="wf:rejectedTask" swimlane="initiator"/>
-        <transition name="" to="end"/>
+        <task name="wf:rejectedTask" swimlane="initiator" />
+        <transition name="" to="end" />
     </task-node>
     <task-node name="approved">
-        <task name="wf:approvedTask" swimlane="initiator"/>
-        <transition name="" to="end"/>
+        <task name="wf:approvedTask" swimlane="initiator" />
+        <transition name="" to="end" />
     </task-node>
-    <end-state name="end"/>
+    <end-state name="end" />
 </process-definition>

View File

@ -201,7 +201,6 @@ public class RepositoryImpl implements Repository
     {
         // construct the session
         SessionImpl sessionImpl = new SessionImpl(this);
-        registerSession(sessionImpl);
 
         // authenticate user
         AuthenticationService authenticationService = getServiceRegistry().getAuthenticationService();
@ -211,7 +210,6 @@
         }
         catch(AuthenticationException e)
         {
-            deregisterSession();
             throw new LoginException("Alfresco Repository failed to authenticate credentials", e);
         }
@ -222,11 +220,11 @@
             // session is now ready
             Session session = sessionImpl.getProxy();
+            registerSession(sessionImpl);
             return session;
         }
         catch(AlfrescoRuntimeException e)
         {
-            deregisterSession();
             throw new RepositoryException(e);
         }
     }
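The fix above reorders session registration: the session used to be registered before authentication, so every failure path had to call deregisterSession(); now registration happens only after login has fully succeeded, leaving the failure paths with nothing to undo. The same register-late pattern in a self-contained toy:

import java.util.ArrayList;
import java.util.List;

// Toy illustration of the register-late pattern: the resource is
// registered only after every step that can fail has passed.
public class RegisterLateDemo
{
    private final List<String> registry = new ArrayList<String>();

    public String openSession(String user, boolean authOk)
    {
        String session = "session-for-" + user;
        if (!authOk)
        {
            // failure path: nothing was registered, nothing to undo
            throw new IllegalStateException("authentication failed");
        }
        registry.add(session);   // only reached on success
        return session;
    }

    public static void main(String[] args)
    {
        System.out.println(new RegisterLateDemo().openSession("admin", true));
    }
}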

View File

@ -109,6 +109,11 @@ public class MailActionExecuter extends ActionExecuterAbstractBase
      */
     private ServiceRegistry serviceRegistry;
 
+    /**
+     * Mail header encoding scheme
+     */
+    private String headerEncoding = null;
+
     /**
      * @param javaMailSender the java mail sender
      */
@ -165,6 +170,14 @@
         this.nodeService = nodeService;
     }
 
+    /**
+     * @param headerEncoding The mail header encoding to set.
+     */
+    public void setHeaderEncoding(String headerEncoding)
+    {
+        this.headerEncoding = headerEncoding;
+    }
+
     /**
      * Execute the rule action
      */
@ -180,6 +193,12 @@
         {
             MimeMessageHelper message = new MimeMessageHelper(mimeMessage);
 
+            // set header encoding if one has been supplied
+            if (headerEncoding != null && headerEncoding.length() != 0)
+            {
+                mimeMessage.setHeader("Content-Transfer-Encoding", headerEncoding);
+            }
+
             // set recipient
             String to = (String)ruleAction.getParameterValue(PARAM_TO);
             if (to != null && to.length() != 0)
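The executer applies the configured value by setting the Content-Transfer-Encoding header on the raw MimeMessage before the helper populates it. A standalone JavaMail illustration of the same call; the Session setup here is boilerplate, not taken from the commit:

import java.util.Properties;
import javax.mail.Session;
import javax.mail.internet.MimeMessage;

public class HeaderEncodingDemo
{
    public static void main(String[] args) throws Exception
    {
        Session session = Session.getInstance(new Properties());
        MimeMessage mimeMessage = new MimeMessage(session);

        String headerEncoding = "7bit";   // e.g. the mail.header value
        if (headerEncoding != null && headerEncoding.length() != 0)
        {
            mimeMessage.setHeader("Content-Transfer-Encoding", headerEncoding);
        }
        System.out.println(mimeMessage.getHeader("Content-Transfer-Encoding", null));
    }
}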

View File

@ -44,6 +44,7 @@ import org.alfresco.util.BaseSpringTest;
 import org.alfresco.util.GUID;
 import org.hibernate.CacheMode;
 import org.hibernate.exception.ConstraintViolationException;
+import org.hibernate.exception.GenericJDBCException;
 
 /**
  * Test persistence and retrieval of Hibernate-specific implementations of the
@ -161,6 +162,11 @@ public class HibernateNodeTest extends BaseSpringTest
         {
             // expected
         }
+        catch(GenericJDBCException e)
+        {
+            // Sybase
+            // expected
+        }
     }
 
     /**
@ -251,8 +257,8 @@
         assoc1.setQname(QName.createQName(null, "number1"));
         assoc1.setChildNodeName("number1");
         assoc1.setChildNodeNameCrc(1);
-        getSession().save(assoc1);
         assoc1.buildAssociation(containerNode, contentNode);
+        getSession().save(assoc1);
 
         // make another association between the same two parent and child nodes
         ChildAssoc assoc2 = new ChildAssocImpl();
@ -261,8 +267,8 @@
         assoc2.setQname(QName.createQName(null, "number2"));
         assoc2.setChildNodeName("number2");
         assoc2.setChildNodeNameCrc(2);
-        getSession().save(assoc2);
         assoc2.buildAssociation(containerNode, contentNode);
+        getSession().save(assoc2);
 
         assertFalse("Hashcode incorrent", assoc2.hashCode() == 0);
         assertNotSame("Assoc equals failure", assoc1, assoc2);

View File

@ -314,56 +314,6 @@
assoc.target.id = :targetId assoc.target.id = :targetId
</query>
<query name="node.GetNextChangeTxnIds">
select distinct
transaction.changeTxnId
from
org.alfresco.repo.domain.hibernate.TransactionImpl as transaction
where
transaction.changeTxnId > :currentTxnId
order by
transaction.changeTxnId
</query>
<query name="node.GetChangedNodeStatusesCount">
select
count(transaction.changeTxnId)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is not null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetChangedNodeStatuses">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is not null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetDeletedNodeStatuses">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetNodesWithPropertyValuesByActualType"> <query name="node.GetNodesWithPropertyValuesByActualType">
select select
node node

View File

@ -59,4 +59,81 @@
server.ipAddress = :ipAddress server.ipAddress = :ipAddress
</query>
<query name="txn.GetLastTxnIdForStore">
select
max(txn.id)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.CountTransactions">
select
count(txn.id)
from
org.alfresco.repo.domain.hibernate.TransactionImpl as txn
</query>
<query name="txn.GetNextTxns">
<![CDATA[
select
txn
from
org.alfresco.repo.domain.hibernate.TransactionImpl as txn
where
txn.id > :lastTxnId
order by
txn.id
]]>
</query>
<query name="txn.GetTxnUpdateCountForStore">
select
count(status.key.guid)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
txn.id = :txnId and
status.node is not null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnDeleteCountForStore">
select
count(status.key.guid)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
txn.id = :txnId and
status.node is null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnChangesForStore">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
where
status.transaction.id = :txnId and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnChanges">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
where
status.transaction.id = :txnId
</query>
</hibernate-mapping>

View File

@ -202,7 +202,7 @@ public final class Actions implements Scopeable
     @SuppressWarnings("synthetic-access")
     public void execute(Node node)
     {
-        if (this.parameters.isModified())
+        if (this.parameters != null && this.parameters.isModified())
         {
             Map<String, Serializable> actionParams = action.getParameterValues();
             actionParams.clear();

View File

@ -25,10 +25,12 @@ import org.alfresco.repo.domain.Node;
 import org.alfresco.repo.domain.NodeAssoc;
 import org.alfresco.repo.domain.NodeStatus;
 import org.alfresco.repo.domain.Store;
+import org.alfresco.repo.domain.Transaction;
 import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
 import org.alfresco.service.cmr.dictionary.InvalidTypeException;
 import org.alfresco.service.cmr.repository.ChildAssociationRef;
 import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.service.cmr.repository.StoreRef;
 import org.alfresco.service.namespace.QName;
 
 /**
@ -233,4 +235,12 @@ public interface NodeDaoService
      * @return Returns the values for the given type definition
      */
     public List<Serializable> getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition);
+
+    public Transaction getLastTxn(final StoreRef storeRef);
+    public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId);
+    public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId);
+    public int getTransactionCount();
+    public List<Transaction> getNextTxns(final Transaction lastTxn, final int count);
+    public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId);
+    public List<NodeRef> getTxnChanges(final long txnId);
 }
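Taken together, the new methods form a small transaction-tracking API: getLastTxn gives the high-water mark for a store, and getNextTxns/getTxnChanges let a caller walk forward through committed transactions in batches. A sketch of the consuming loop, assuming only the signatures above plus a getId() accessor on Transaction (it is used that way in the implementation that follows):

// Fragment: walk all transactions in id order, in batches of 100,
// fetching the nodes changed by each one.
Transaction lastTxn = null;
while (true)
{
    List<Transaction> txns = nodeDaoService.getNextTxns(lastTxn, 100);
    if (txns.isEmpty())
    {
        break;   // caught up with the database
    }
    for (Transaction txn : txns)
    {
        List<NodeRef> changed = nodeDaoService.getTxnChanges(txn.getId());
        // reindex each NodeRef here
    }
    lastTxn = txns.get(txns.size() - 1);
}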

View File

@ -990,4 +990,176 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
return convertedValues;
}
/*
* Queries for transactions
*/
private static final String QUERY_GET_LAST_TXN_ID_FOR_STORE = "txn.GetLastTxnIdForStore";
private static final String QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE = "txn.GetTxnUpdateCountForStore";
private static final String QUERY_GET_TXN_DELETE_COUNT_FOR_STORE = "txn.GetTxnDeleteCountForStore";
private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions";
private static final String QUERY_GET_NEXT_TXNS = "txn.GetNextTxns";
private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore";
private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges";
@SuppressWarnings("unchecked")
public Transaction getLastTxn(final StoreRef storeRef)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_LAST_TXN_ID_FOR_STORE);
query.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Long txnId = (Long) getHibernateTemplate().execute(callback);
Transaction txn = null;
if (txnId != null)
{
txn = (Transaction) getSession().get(TransactionImpl.class, txnId);
}
// done
return txn;
}
@SuppressWarnings("unchecked")
public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count;
}
@SuppressWarnings("unchecked")
public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_DELETE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count;
}
@SuppressWarnings("unchecked")
public int getTransactionCount()
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_COUNT_TRANSACTIONS);
query.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count.intValue();
}
@SuppressWarnings("unchecked")
public List<Transaction> getNextTxns(final Transaction lastTxn, final int count)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
long lastTxnId = (lastTxn == null) ? -1L : lastTxn.getId();
Query query = session.getNamedQuery(QUERY_GET_NEXT_TXNS);
query.setLong("lastTxnId", lastTxnId)
.setMaxResults(count)
.setReadOnly(true);
return query.list();
}
};
List<Transaction> results = (List<Transaction>) getHibernateTemplate().execute(callback);
// done
return results;
}
@SuppressWarnings("unchecked")
public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setReadOnly(true);
return query.list();
}
};
List<NodeStatus> results = (List<NodeStatus>) getHibernateTemplate().execute(callback);
// transform into a simpler form
List<NodeRef> nodeRefs = new ArrayList<NodeRef>(results.size());
for (NodeStatus nodeStatus : results)
{
NodeRef nodeRef = new NodeRef(storeRef, nodeStatus.getKey().getGuid());
nodeRefs.add(nodeRef);
}
// done
return nodeRefs;
}
@SuppressWarnings("unchecked")
public List<NodeRef> getTxnChanges(final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES);
query.setLong("txnId", txnId)
.setReadOnly(true);
return query.list();
}
};
List<NodeStatus> results = (List<NodeStatus>) getHibernateTemplate().execute(callback);
// transform into a simpler form
List<NodeRef> nodeRefs = new ArrayList<NodeRef>(results.size());
for (NodeStatus nodeStatus : results)
{
NodeRef nodeRef = new NodeRef(
nodeStatus.getKey().getProtocol(),
nodeStatus.getKey().getIdentifier(),
nodeStatus.getKey().getGuid());
nodeRefs.add(nodeRef);
}
// done
return nodeRefs;
}
}

View File

@ -0,0 +1,227 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import net.sf.acegisecurity.Authentication;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Abstract helper for reindexing.
*
* @see #reindexImpl()
* @see #getIndexerWriteLock()
* @see #isShuttingDown()
*
* @author Derek Hulley
*/
public abstract class AbstractReindexComponent implements IndexRecovery
{
private static Log logger = LogFactory.getLog(AbstractReindexComponent.class);
/** kept to notify the thread that it should quit */
private static VmShutdownListener vmShutdownListener = new VmShutdownListener("MissingContentReindexComponent");
private AuthenticationComponent authenticationComponent;
/** provides transactions to atomically index each missed transaction */
protected TransactionComponent transactionService;
/** the component to index the node hierarchy */
protected Indexer indexer;
/** the FTS indexer that we will prompt to pick up on any un-indexed text */
protected FullTextSearchIndexer ftsIndexer;
/** the component providing searches of the indexed nodes */
protected SearchService searcher;
/** the component giving direct access to <b>store</b> instances */
protected NodeService nodeService;
/** the component giving direct access to <b>transaction</b> instances */
protected NodeDaoService nodeDaoService;
private boolean shutdown;
private final WriteLock indexerWriteLock;
public AbstractReindexComponent()
{
shutdown = false;
ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
indexerWriteLock = readWriteLock.writeLock();
}
/**
* Convenience method to get a common write lock. This can be used to avoid
* concurrent access to the work methods.
*/
protected WriteLock getIndexerWriteLock()
{
return indexerWriteLock;
}
/**
* Programmatically notify a reindex thread to terminate
*
* @param shutdown true to shutdown, false to reset
*/
public void setShutdown(boolean shutdown)
{
this.shutdown = shutdown;
}
/**
*
* @return Returns true if the VM shutdown hook has been triggered, or the instance
* was programmatically {@link #shutdown shut down}
*/
protected boolean isShuttingDown()
{
return shutdown || vmShutdownListener.isVmShuttingDown();
}
/**
* @param authenticationComponent ensures that reindexing operates as system user
*/
public void setAuthenticationComponent(AuthenticationComponent authenticationComponent)
{
this.authenticationComponent = authenticationComponent;
}
/**
* Set the low-level transaction component to use
*
* @param transactionComponent provide transactions to index each missed transaction
*/
public void setTransactionComponent(TransactionComponent transactionComponent)
{
this.transactionService = transactionComponent;
}
/**
* @param indexer the indexer that will do the indexing
*/
public void setIndexer(Indexer indexer)
{
this.indexer = indexer;
}
/**
* @param ftsIndexer the FTS background indexer
*/
public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
{
this.ftsIndexer = ftsIndexer;
}
/**
* @param searcher component providing index searches
*/
public void setSearcher(SearchService searcher)
{
this.searcher = searcher;
}
/**
* @param nodeService provides information about nodes for indexing
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* @param nodeDaoService provides access to transaction-related queries
*/
public void setNodeDaoService(NodeDaoService nodeDaoService)
{
this.nodeDaoService = nodeDaoService;
}
/**
* Perform the actual work. This method will be called as the system user
* and within an existing transaction. It will only ever be called by
* a single thread per instance.
*
*/
protected abstract void reindexImpl();
/**
* If this object is currently busy, then it does nothing
*/
public final void reindex()
{
PropertyCheck.mandatory(this, "authenticationComponent", this.authenticationComponent);
PropertyCheck.mandatory(this, "ftsIndexer", this.ftsIndexer);
PropertyCheck.mandatory(this, "indexer", this.indexer);
PropertyCheck.mandatory(this, "searcher", this.searcher);
PropertyCheck.mandatory(this, "nodeService", this.nodeService);
PropertyCheck.mandatory(this, "nodeDaoService", this.nodeDaoService);
PropertyCheck.mandatory(this, "transactionComponent", this.transactionService);
if (indexerWriteLock.tryLock())
{
Authentication auth = null;
try
{
auth = AuthenticationUtil.getCurrentAuthentication();
// authenticate as the system user
authenticationComponent.setSystemUserAsCurrentUser();
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork() throws Exception
{
reindexImpl();
return null;
}
};
TransactionUtil.executeInUserTransaction(transactionService, reindexWork);
}
finally
{
try { indexerWriteLock.unlock(); } catch (Throwable e) {}
if (auth != null)
{
authenticationComponent.setCurrentAuthentication(auth);
}
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Reindex work completed: " + this);
}
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Bypassed reindex work - already busy: " + this);
}
}
}
}
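reindex() above is a template method: it asserts its mandatory properties, takes a non-blocking write lock, switches to the system user, runs reindexImpl() inside a transaction, and then restores the caller's authentication. A subclass therefore only supplies the work itself; a minimal hypothetical example:

// Hypothetical subclass: everything except reindexImpl() is inherited.
public class NoOpReindexComponent extends AbstractReindexComponent
{
    @Override
    protected void reindexImpl()
    {
        // runs as the system user, inside a transaction,
        // and under the shared indexer write lock
    }
}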

View File

@ -1,134 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Ensures that the FTS indexing picks up on any outstanding documents that
* require indexing.
* <p>
* FTS indexing is a background process. It is therefore possible that
* certain documents don't get indexed when the server shuts down.
*
* @author Derek Hulley
*/
public class FtsIndexRecoveryComponent implements IndexRecovery
{
private static Log logger = LogFactory.getLog(FtsIndexRecoveryComponent.class);
/** provides transactions to atomically index each missed transaction */
private TransactionService transactionService;
/** the FTS indexer that we will prompt to pick up on any un-indexed text */
private FullTextSearchIndexer ftsIndexer;
/** the component giving direct access to <b>node</b> instances */
private NodeService nodeService;
/** the workspaces to reindex */
private List<StoreRef> storeRefs;
public FtsIndexRecoveryComponent()
{
this.storeRefs = new ArrayList<StoreRef>(2);
}
/**
* @param transactionService provide transactions to index each missed transaction
*/
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
/**
* @param ftsIndexer the FTS background indexer
*/
public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
{
this.ftsIndexer = ftsIndexer;
}
/**
* @param nodeService provides information about nodes for indexing
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Set the workspaces that need reindexing
*
* @param storeRefStrings a list of strings representing store references
*/
public void setStores(List<String> storeRefStrings)
{
storeRefs.clear();
for (String storeRefStr : storeRefStrings)
{
StoreRef storeRef = new StoreRef(storeRefStr);
storeRefs.add(storeRef);
}
}
/**
* Ensures that the FTS indexing is activated for any outstanding full text searches.
*/
public void reindex()
{
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
// reindex each store
for (StoreRef storeRef : storeRefs)
{
// check if the store exists
if (!nodeService.exists(storeRef))
{
// store does not exist
if (logger.isDebugEnabled())
{
logger.debug("Skipping reindex of non-existent store: " + storeRef);
}
continue;
}
// prompt FTS to reindex the store
ftsIndexer.requiresIndex(storeRef);
}
// done
return null;
}
};
TransactionUtil.executeInUserTransaction(transactionService, reindexWork);
// done
if (logger.isDebugEnabled())
{
logger.debug("Prompted FTS index on stores: " + storeRefs);
}
}
}

View File

@ -1,60 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
* Checks that the FTS index recovery component is working
*
* @author Derek Hulley
*/
public class FtsIndexRecoveryComponentTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private IndexRecovery indexRecoverer;
private TransactionService txnService;
public void setUp() throws Exception
{
indexRecoverer = (IndexRecovery) ctx.getBean("indexRecoveryComponent");
txnService = (TransactionService) ctx.getBean("transactionComponent");
}
public void testReindexing() throws Exception
{
// performs a reindex
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
indexRecoverer.reindex();
return null;
}
};
// reindex
TransactionUtil.executeInNonPropagatingUserTransaction(txnService, reindexWork);
}
}

View File

@ -16,660 +16,365 @@
  */
 package org.alfresco.repo.node.index;
 
-import java.util.ArrayList;
 import java.util.List;
 
-import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.i18n.I18NUtil;
 import org.alfresco.model.ContentModel;
-import org.alfresco.repo.domain.NodeStatus;
-import org.alfresco.repo.search.Indexer;
-import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
-import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
+import org.alfresco.repo.domain.Transaction;
+import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
 import org.alfresco.repo.transaction.TransactionUtil;
 import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
 import org.alfresco.service.cmr.repository.ChildAssociationRef;
 import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
 import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.cmr.repository.NodeRef.Status;
 import org.alfresco.service.cmr.search.ResultSet;
 import org.alfresco.service.cmr.search.SearchParameters;
 import org.alfresco.service.cmr.search.SearchService;
-import org.alfresco.service.transaction.TransactionService;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.hibernate.CacheMode;
-import org.hibernate.Query;
-import org.hibernate.Session;
-import org.springframework.orm.hibernate3.HibernateCallback;
-import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
 
 /**
- * Ensures that the FTS indexing picks up on any outstanding documents that
- * require indexing.
- * <p>
- * This component must be used as a singleton (one per VM) and may only be
- * called to reindex once. It will start a thread that processes all available
- * transactions and keeps checking to ensure that the index is up to date with
- * the latest database changes.
- * <p>
- * <b>The following points are important:</b>
- * <ul>
- *   <li>
- *     By default, the Hibernate L2 cache is used during processing.
- *     This can be disabled by either disabling the L2 cache globally
- *     for the server (not recommended) or by setting the
- *     {@link #setL2CacheMode(String) l2CacheMode} property. If the
- *     database is static then the L2 cache usage can be set to use
- *     the <code>NORMAL</code> mode. <code>REFRESH</code> should be
- *     used where the server will still be accessed from some clients
- *     despite the database changing. <code>NORMAL</code> can be used
- *     in the case of the caches being clustered, i.e. the caches will
- *     not be out of date w.r.t. the database.
- *   </li>
- *   <li>
- *     This process should only be used continuously where the index
- *     transactions are following the database transactions. Use the
- *     {@link #setRunContinuously(boolean) runContinuously} property
- *     to change this behaviour.
- *   </li>
- * </ul>
+ * Component to check and recover the indexes.
  *
  * @author Derek Hulley
  */
-public class FullIndexRecoveryComponent extends HibernateDaoSupport implements IndexRecovery
+public class FullIndexRecoveryComponent extends AbstractReindexComponent
 {
-    public static final String QUERY_GET_NEXT_CHANGE_TXN_IDS = "node.GetNextChangeTxnIds";
-    public static final String QUERY_GET_CHANGED_NODE_STATUSES = "node.GetChangedNodeStatuses";
-    public static final String QUERY_GET_DELETED_NODE_STATUSES = "node.GetDeletedNodeStatuses";
-    public static final String QUERY_GET_CHANGED_NODE_STATUSES_COUNT = "node.GetChangedNodeStatusesCount";
-    
-    private static final String START_TXN_ID = "000";
+    private static final String ERR_STORE_NOT_UP_TO_DATE = "index.recovery.store_not_up_to_date";
+    private static final String MSG_RECOVERY_STARTING = "index.recovery.starting";
+    private static final String MSG_RECOVERY_COMPLETE = "index.recovery.complete";
+    private static final String MSG_RECOVERY_PROGRESS = "index.recovery.progress";
+    private static final String MSG_RECOVERY_TERMINATED = "index.recovery.terminated";
     
     private static Log logger = LogFactory.getLog(FullIndexRecoveryComponent.class);
     
-    /** ensures that this process is kicked off once per VM */
-    private static boolean started = false;
-    /** The current transaction ID being processed */
-    private static String currentTxnId = START_TXN_ID;
-    /** kept to notify the thread that it should quit */
-    private boolean killThread = false;
-    /** provides transactions to atomically index each missed transaction */
-    private TransactionService transactionService;
-    /** the component to index the node hierarchy */
-    private Indexer indexer;
-    /** the FTS indexer that we will prompt to pick up on any un-indexed text */
-    private FullTextSearchIndexer ftsIndexer;
-    /** the component providing searches of the indexed nodes */
-    private SearchService searcher;
-    /** the component giving direct access to <b>node</b> instances */
-    private NodeService nodeService;
-    /** set this to run the index recovery component */
-    private boolean executeFullRecovery;
-    /** set this on to keep checking for new transactions and never stop */
-    private boolean runContinuously;
-    /** set the time to wait between checking indexes */
-    private long waitTime;
-    /** controls how the L2 cache is used */
-    private CacheMode l2CacheMode;
-    
-    /**
-     * @return Returns the ID of the current (or last) transaction processed
-     */
-    public static String getCurrentTransactionId()
+    public static enum RecoveryMode
     {
-        return currentTxnId;
+        /** Do nothing - not even a check */
+        NONE,
+        /** Perform a quick check on the state of the indexes only */
+        VALIDATE,
+        /** Performs a quick validation and then starts a full pass-through on failure */
+        AUTO,
+        /** Performs a full pass-through of all recorded transactions to ensure that the indexes are up to date */
+        FULL;
     }
+    
+    private RecoveryMode recoveryMode;
     
     public FullIndexRecoveryComponent()
     {
-        this.killThread = false;
-        this.executeFullRecovery = false;
-        this.runContinuously = false;
-        this.waitTime = 1000L;
-        this.l2CacheMode = CacheMode.REFRESH;
-        
-        // ensure that we kill the thread when the VM is shutting down
-        Runnable shutdownRunnable = new Runnable()
-        {
-            public void run()
-            {
-                killThread = true;
-            };
-        };
-        Thread shutdownThread = new Thread(shutdownRunnable);
-        Runtime.getRuntime().addShutdownHook(shutdownThread);
+        recoveryMode = RecoveryMode.VALIDATE;
     }
     
+    /**
+     * Set the type of recovery to perform.
+     *
+     * @param recoveryMode one of the {@link RecoveryMode } values
+     */
+    public void setRecoveryMode(String recoveryMode)
+    {
+        this.recoveryMode = RecoveryMode.valueOf(recoveryMode);
+    }
+    
+    @Override
+    protected void reindexImpl()
+    {
+        if (logger.isDebugEnabled())
+        {
+            logger.debug("Performing index recovery for type: " + recoveryMode);
+        }
+        
+        // do we just ignore
+        if (recoveryMode == RecoveryMode.NONE)
+        {
+            return;
+        }
+        // check the level of cover required
+        boolean fullRecoveryRequired = false;
+        if (recoveryMode == RecoveryMode.FULL)      // no validate required
+        {
+            fullRecoveryRequired = true;
+        }
+        else            // validate first
+        {
+            List<StoreRef> storeRefs = nodeService.getStores();
+            for (StoreRef storeRef : storeRefs)
+            {
+                // get the last txn ID in the database
+                Transaction txn = nodeDaoService.getLastTxn(storeRef);
+                boolean lastChangeTxnIdInIndex = isTxnIdPresentInIndex(storeRef, txn);
+                if (lastChangeTxnIdInIndex)
-    /**
-     * @return Returns true if the component has already been started
-     */
-    public static boolean isStarted()
-    {
-        return started;
-    }
-    
-    /**
-     * @param transactionService provide transactions to index each missed transaction
-     */
-    public void setTransactionService(TransactionService transactionService)
-    {
-        this.transactionService = transactionService;
-    }
-    
-    /**
-     * @param indexer the indexer that will be index
-     */
-    public void setIndexer(Indexer indexer)
-    {
-        this.indexer = indexer;
-    }
-    
-    /**
-     * @param ftsIndexer the FTS background indexer
-     */
-    public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
-    {
-        this.ftsIndexer = ftsIndexer;
-    }
-    
-    /**
-     * @param searcher component providing index searches
-     */
-    public void setSearcher(SearchService searcher)
-    {
-        this.searcher = searcher;
-    }
-    
-    /**
-     * @param nodeService provides information about nodes for indexing
-     */
-    public void setNodeService(NodeService nodeService)
-    {
-        this.nodeService = nodeService;
-    }
-    
-    /**
-     * Set this to <code>true</code> to initiate the full index recovery.
-     * <p>
-     * This used to default to <code>true</code> but is now false. Set this
-     * if the potentially long-running process of checking and fixing the
-     * indexes must be started.
-     *
-     * @param executeFullRecovery
-     */
-    public void setExecuteFullRecovery(boolean executeFullRecovery)
-    {
-        this.executeFullRecovery = executeFullRecovery;
-    }
-    
-    /**
-     * Set this to ensure that the process continuously checks for new transactions.
-     * If not, it will permanently terminate once it catches up with the current
-     * transactions.
-     *
-     * @param runContinuously true to never cease looking for new transactions
-     */
-    public void setRunContinuously(boolean runContinuously)
-    {
-        this.runContinuously = runContinuously;
-    }
-    
-    /**
-     * Set the time to wait between checking for new transaction changes in the database.
-     *
-     * @param waitTime the time to wait in milliseconds
-     */
-    public void setWaitTime(long waitTime)
-    {
-        this.waitTime = waitTime;
-    }
-    
-    /**
-     * Set the hibernate cache mode by name
-     *
-     * @see org.hibernate.CacheMode
-     */
-    public void setL2CacheMode(String l2CacheModeStr)
-    {
-        if (l2CacheModeStr.equals("GET"))
-        {
-            l2CacheMode = CacheMode.GET;
-        }
-        else if (l2CacheModeStr.equals("IGNORE"))
-        {
-            l2CacheMode = CacheMode.IGNORE;
-        }
-        else if (l2CacheModeStr.equals("NORMAL"))
-        {
-            l2CacheMode = CacheMode.NORMAL;
-        }
-        else if (l2CacheModeStr.equals("PUT"))
-        {
-            l2CacheMode = CacheMode.PUT;
-        }
-        else if (l2CacheModeStr.equals("REFRESH"))
-        {
-            l2CacheMode = CacheMode.REFRESH;
-        }
-        else
-        {
-            throw new IllegalArgumentException("Unrecognised Hibernate L2 cache mode: " + l2CacheModeStr);
-        }
-    }
-    
-    /**
-     * Ensure that the index is up to date with the current state of the persistence layer.
-     * The full list of unique transaction change IDs is retrieved and used to detect
-     * which are not present in the index. All the node changes and deletions for the
* remaining transactions are then indexed.
*/
public synchronized void reindex()
{
if (FullIndexRecoveryComponent.started)
{
throw new AlfrescoRuntimeException
("Only one FullIndexRecoveryComponent may be used per VM and it may only be called once");
}
// ensure that we don't redo this work
FullIndexRecoveryComponent.started = true;
// work to mark the stores for full text reindexing
TransactionWork<Object> ftsReindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
List<StoreRef> storeRefs = nodeService.getStores();
// reindex each store
for (StoreRef storeRef : storeRefs)
{ {
// check if the store exists // this store is good
if (!nodeService.exists(storeRef)) continue;
{
// store does not exist
if (logger.isDebugEnabled())
{
logger.debug("Skipping reindex of non-existent store: " + storeRef);
}
continue;
}
// prompt FTS to reindex the store
ftsIndexer.requiresIndex(storeRef);
} }
// done // this store isn't up to date
if (logger.isDebugEnabled()) String msg = I18NUtil.getMessage(ERR_STORE_NOT_UP_TO_DATE, storeRef);
logger.warn(msg);
// the store is out of date - validation failed
if (recoveryMode == RecoveryMode.VALIDATE)
{ {
logger.debug("Prompted FTS index on stores: " + storeRefs); // next store
continue;
} }
return null; else if (recoveryMode == RecoveryMode.AUTO)
}
};
TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, ftsReindexWork);
// start full index recovery, if necessary
if (!this.executeFullRecovery)
{
if (logger.isDebugEnabled())
{
logger.debug("Full index recovery is off - quitting");
}
}
else
{
// set the state of the reindex
FullIndexRecoveryComponent.currentTxnId = START_TXN_ID;
// start a stateful thread that will begin processing the reindexing the transactions
Runnable runnable = new ReindexRunner();
Thread reindexThread = new Thread(runnable);
// make it a daemon thread
reindexThread.setDaemon(true);
// it should not be a high priority
reindexThread.setPriority(Thread.MIN_PRIORITY);
// start it
reindexThread.start();
if (logger.isDebugEnabled())
{
logger.debug("Full index recovery thread started: \n" +
" continuous: " + runContinuously);
}
}
}
/**
* Stateful thread runnable that executes reindex calls.
*
* @see FullIndexRecoveryComponent#reindexNodes()
*
* @author Derek Hulley
*/
private class ReindexRunner implements Runnable
{
public void run()
{
// keep this thread going permanently
while (!killThread)
{
try
{ {
// reindex nodes fullRecoveryRequired = true;
List<String> txnsIndexed = FullIndexRecoveryComponent.this.reindexNodes();
// reindex missing content
// @SuppressWarnings("unused")
// int missingContentCount = FullIndexRecoveryComponent.this.reindexMissingContent();
// check if the process should terminate
if (txnsIndexed.size() == 0 && !runContinuously)
{
// the thread has caught up with all the available work and should not
// run continuously
if (logger.isDebugEnabled())
{
logger.debug("Thread quitting - no more available indexing to do: \n" +
" last txn: " + FullIndexRecoveryComponent.getCurrentTransactionId());
}
break;
}
// brief pause
synchronized(FullIndexRecoveryComponent.this)
{
FullIndexRecoveryComponent.this.wait(waitTime);
}
}
catch (InterruptedException e)
{
// ignore
}
catch (Throwable e)
{
if (killThread)
{
// the shutdown may have caused the exception - ignore it
}
else
{
// we are still a go; report it
logger.error("Reindex failure", e);
}
} }
} }
} }
}
/** // put the server into read-only mode for the duration
* @return Returns the transaction ID just reindexed, i.e. where some work was performed boolean allowWrite = !transactionService.isReadOnly();
*/ try
private List<String> reindexNodes()
{
// get a list of all transactions still requiring a check
List<String> txnsToCheck = getNextChangeTxnIds(FullIndexRecoveryComponent.currentTxnId);
// loop over each transaction
for (String changeTxnId : txnsToCheck)
{ {
reindexNodes(changeTxnId); // set the server into read-only mode
transactionService.setAllowWrite(false);
// do we need to perform a full recovery
if (fullRecoveryRequired)
{
performFullRecovery();
}
}
finally
{
// restore read-only state
transactionService.setAllowWrite(allowWrite);
} }
}
private static final int MAX_TRANSACTIONS_PER_ITERATION = 1000;
private void performFullRecovery()
{
int txnCount = nodeDaoService.getTransactionCount();
// starting
String msgStart = I18NUtil.getMessage(MSG_RECOVERY_STARTING, txnCount);
logger.info(msgStart);
// count the transactions
int processedCount = 0;
Transaction lastTxn = null;
while(true)
{
List<Transaction> nextTxns = nodeDaoService.getNextTxns(
lastTxn,
MAX_TRANSACTIONS_PER_ITERATION);
// reindex each transaction
for (Transaction txn : nextTxns)
{
Long txnId = txn.getId();
// check if we have to terminate
if (isShuttingDown())
{
String msgTerminated = I18NUtil.getMessage(MSG_RECOVERY_TERMINATED);
logger.warn(msgTerminated);
return;
}
reindexTransaction(txnId);
// dump a progress report every 10% of the way
double before = (double) processedCount / (double) txnCount * 10.0; // 0 - 10
processedCount++;
double after = (double) processedCount / (double) txnCount * 10.0; // 0 - 10
if (Math.floor(before) < Math.floor(after)) // crossed a 0 - 10 integer boundary
{
int complete = ((int)Math.floor(after))*10;
String msgProgress = I18NUtil.getMessage(MSG_RECOVERY_PROGRESS, complete);
logger.info(msgProgress);
}
}
// have we finished?
if (nextTxns.size() == 0)
{
// there are no more
break;
}
lastTxn = nextTxns.get(nextTxns.size() - 1);
}
// done // done
return txnsToCheck; String msgDone = I18NUtil.getMessage(MSG_RECOVERY_COMPLETE);
logger.info(msgDone);
} }
/** /**
* Reindexes changes specific to the change transaction ID. * Perform a full reindexing of the given transaction in the context of a completely
* <p> * new transaction.
* <b>All exceptions are absorbed.</b> *
* @param txnId the transaction identifier
*/ */
private void reindexNodes(final String changeTxnId) public void reindexTransaction(final long txnId)
{ {
/* if (logger.isDebugEnabled())
* This must execute each within its own transaction. {
* The cache size is therefore not an issue. logger.debug("Reindexing transaction: " + txnId);
*/ }
TransactionWork<Object> reindexWork = new TransactionWork<Object>() TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{ {
public Object doWork() throws Exception public Object doWork() throws Exception
{ {
// perform the work in a Hibernate callback // get the node references pertinent to the transaction
HibernateCallback callback = new ReindexCallback(changeTxnId); List<NodeRef> nodeRefs = nodeDaoService.getTxnChanges(txnId);
getHibernateTemplate().execute(callback); // reindex each node
for (NodeRef nodeRef : nodeRefs)
{
Status nodeStatus = nodeService.getNodeStatus(nodeRef);
if (nodeStatus == null)
{
// it's not there any more
continue;
}
if (nodeStatus.isDeleted()) // node deleted
{
// only the child node ref is relevant
ChildAssociationRef assocRef = new ChildAssociationRef(
ContentModel.ASSOC_CHILDREN,
null,
null,
nodeRef);
indexer.deleteNode(assocRef);
}
else // node created
{
// get the primary assoc for the node
ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef);
// reindex
indexer.createNode(primaryAssocRef);
}
}
// done // done
return null; return null;
} }
}; };
try TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork, true);
{ // done
TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork);
}
catch (Throwable e)
{
logger.error("Transaction reindex failed: \n" +
" txn: " + changeTxnId,
e);
}
finally
{
// Up the current transaction now, in case the process fails at this point.
// This will prevent the transaction from being processed again.
// This applies to failures as well, which should be dealt with externally
// and having the entire process start again, e.g. such as a system reboot
currentTxnId = changeTxnId;
}
} }
/** private boolean isTxnIdPresentInIndex(StoreRef storeRef, Transaction txn)
* Stateful inner class that implements a single reindex call for a given store
* and transaction.
* <p>
* It must be called within its own transaction.
*
* @author Derek Hulley
*/
private class ReindexCallback implements HibernateCallback
{ {
private final String changeTxnId; if (logger.isDebugEnabled())
public ReindexCallback(String changeTxnId)
{ {
this.changeTxnId = changeTxnId; logger.debug("Checking for transaction in index: \n" +
" store: " + storeRef + "\n" +
" txn: " + txn);
} }
/** String changeTxnId = txn.getChangeTxnId();
* Changes the L2 cache usage before reindexing for each store // count the changes in the transaction
* int updateCount = nodeDaoService.getTxnUpdateCountForStore(storeRef, txn.getId());
* @see #reindexNodes(StoreRef, String) int deleteCount = nodeDaoService.getTxnDeleteCountForStore(storeRef, txn.getId());
*/ if (logger.isDebugEnabled())
public Object doInHibernate(Session session)
{ {
// set the way the L2 cache is used logger.debug("Transaction has " + updateCount + " updates and " + deleteCount + " deletes: " + txn);
getSession().setCacheMode(l2CacheMode);
// reindex each store
// for (StoreRef storeRef : storeRefs)
// {
// if (!nodeService.exists(storeRef))
// {
// // the store is not present
// continue;
// }
// // reindex for store
// reindexNodes(storeRef, changeTxnId);
// }
// done
return null;
} }
private void reindexNodes(StoreRef storeRef, String changeTxnId) // do the most update check, which is most common
if (deleteCount == 0 && updateCount == 0)
{
if (logger.isDebugEnabled())
{
logger.debug("No changes in transaction: " + txn);
}
// there's nothing to check for
return true;
}
else if (updateCount > 0)
{ {
// check if we need to perform this operation
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index
String query = "TX:\"" + changeTxnId + "\"";
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery(query);
ResultSet results = null; ResultSet results = null;
try try
{ {
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TX:" + LuceneQueryParser.escape(changeTxnId));
sp.setLimit(1);
results = searcher.query(sp); results = searcher.query(sp);
// did the index have any of these changes?
if (results.length() > 0) if (results.length() > 0)
{ {
// the transaction has an entry in the index - assume that it was
// atomically correct
if (logger.isDebugEnabled()) if (logger.isDebugEnabled())
{ {
logger.debug("Transaction present in index - no indexing required: \n" + logger.debug("Index has results for txn (OK): " + txn);
" store: " + storeRef + "\n" +
" txn: " + changeTxnId);
} }
return; return true; // there were updates/creates and results for the txn were found
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Index has no results for txn (Index out of date): " + txn);
}
return false;
} }
} }
finally finally
{ {
if (results != null) if (results != null) { results.close(); }
{
results.close();
}
}
// the index has no record of this
// were there any changes, or is it all just deletions?
int changedCount = getChangedNodeStatusesCount(storeRef, changeTxnId);
if (changedCount == 0)
{
// no nodes were changed in the transaction, i.e. they are only deletions
// the index is quite right not to have any entries for the transaction
if (logger.isDebugEnabled())
{
logger.debug("Transaction only has deletions - no indexing required: \n" +
" store: " + storeRef + "\n" +
" txn: " + changeTxnId);
}
return;
}
// process the deletions relevant to the txn and the store
List<NodeStatus> deletedNodeStatuses = getDeletedNodeStatuses(storeRef, changeTxnId);
for (NodeStatus status : deletedNodeStatuses)
{
NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid());
// only the child node ref is relevant
ChildAssociationRef assocRef = new ChildAssociationRef(
ContentModel.ASSOC_CHILDREN,
null,
null,
nodeRef);
indexer.deleteNode(assocRef);
}
// process additions
List<NodeStatus> changedNodeStatuses = getChangedNodeStatuses(storeRef, changeTxnId);
for (NodeStatus status : changedNodeStatuses)
{
NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid());
// get the primary assoc for the node
ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef);
// reindex
indexer.createNode(primaryAssocRef);
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Transaction reindexed: \n" +
" store: " + storeRef + "\n" +
" txn: " + changeTxnId + "\n" +
" deletions: " + deletedNodeStatuses.size() + "\n" +
" modifications: " + changedNodeStatuses.size());
} }
} }
}; // there have been deletes, so we have to ensure that none of the nodes deleted are present in the index
// get all node refs for the transaction
/** Long txnId = txn.getId();
* Retrieve next 50 transaction IDs that are greater than the given transaction ID. List<NodeRef> nodeRefs = nodeDaoService.getTxnChangesForStore(storeRef, txnId);
* for (NodeRef nodeRef : nodeRefs)
* @param currentTxnId the transaction ID that must be less than all returned results
* @return Returns an ordered list of the next 50 transaction IDs
*/
@SuppressWarnings("unchecked")
public List<String> getNextChangeTxnIds(final String currentTxnId)
{
HibernateCallback callback = new HibernateCallback()
{ {
public Object doInHibernate(Session session) if (logger.isDebugEnabled())
{ {
Query query = session.getNamedQuery(QUERY_GET_NEXT_CHANGE_TXN_IDS); logger.debug("Searching for node in index: \n" +
query.setString("currentTxnId", currentTxnId) " node: " + nodeRef + "\n" +
.setMaxResults(50) " txn: " + txn);
.setReadOnly(true);
return query.list();
} }
}; // we know that these are all deletions
List<String> queryResults = (List<String>) getHibernateTemplate().execute(callback); ResultSet results = null;
// done try
return queryResults; {
} SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("ID:" + LuceneQueryParser.escape(nodeRef.toString()));
sp.setLimit(1);
@SuppressWarnings("unchecked") results = searcher.query(sp);
public int getChangedNodeStatusesCount(final StoreRef storeRef, final String changeTxnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES_COUNT);
query.setString("storeProtocol", storeRef.getProtocol())
.setString("storeIdentifier", storeRef.getIdentifier())
.setString("changeTxnId", changeTxnId)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer changeCount = (Integer) getHibernateTemplate().execute(callback);
// done
return changeCount.intValue();
}
@SuppressWarnings("unchecked") if (results.length() == 0)
public List<NodeStatus> getChangedNodeStatuses(final StoreRef storeRef, final String changeTxnId) {
{ // no results, as expected
HibernateCallback callback = new HibernateCallback() if (logger.isDebugEnabled())
{ {
public Object doInHibernate(Session session) logger.debug(" --> Node not found (OK)");
{ }
Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES); continue;
query.setString("storeProtocol", storeRef.getProtocol()) }
.setString("storeIdentifier", storeRef.getIdentifier()) else
.setString("changeTxnId", changeTxnId) {
.setReadOnly(true); if (logger.isDebugEnabled())
return query.list(); {
logger.debug(" --> Node found (Index out of date)");
}
return false;
}
} }
}; finally
List<NodeStatus> queryResults = (List) getHibernateTemplate().execute(callback); {
// done if (results != null) { results.close(); }
return queryResults; }
} }
@SuppressWarnings("unchecked") // all tests passed
public List<NodeStatus> getDeletedNodeStatuses(final StoreRef storeRef, final String changeTxnId) if (logger.isDebugEnabled())
{
HibernateCallback callback = new HibernateCallback()
{ {
public Object doInHibernate(Session session) logger.debug("Index is in synch with transaction: " + txn);
{ }
Query query = session.getNamedQuery(QUERY_GET_DELETED_NODE_STATUSES); return true;
query.setString("storeProtocol", storeRef.getProtocol())
.setString("storeIdentifier", storeRef.getIdentifier())
.setString("changeTxnId", changeTxnId)
.setReadOnly(true);
return query.list();
}
};
List<NodeStatus> queryResults = (List) getHibernateTemplate().execute(callback);
// done
return queryResults;
} }
} }
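A minimal usage sketch (hypothetical, not shipped with the commit): because the recovery mode is configured as a plain string, a caller can switch the component to AUTO and trigger the checked rebuild programmatically. The "indexRecoveryComponent" bean name and the reindex() entry point are the ones exercised by the test below; the wrapper class is assumed.

import org.alfresco.repo.node.index.FullIndexRecoveryComponent;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;

public class IndexRecoveryExample
{
    public static void main(String[] args)
    {
        ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
        FullIndexRecoveryComponent recoverer =
                (FullIndexRecoveryComponent) ctx.getBean("indexRecoveryComponent");
        // AUTO validates each store first and only falls back to the full pass-through on a mismatch
        recoverer.setRecoveryMode(FullIndexRecoveryComponent.RecoveryMode.AUTO.name());
        recoverer.reindex();
    }
}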
View File

@ -16,24 +16,8 @@
 */
package org.alfresco.repo.node.index;

import junit.framework.TestCase;

import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;

@ -46,114 +30,35 @@ public class FullIndexRecoveryComponentTest extends TestCase
{
    private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();

    private FullIndexRecoveryComponent indexRecoverer;

    public void setUp() throws Exception
    {
        indexRecoverer = (FullIndexRecoveryComponent) ctx.getBean("indexRecoveryComponent");
    }

    public void testSetup() throws Exception
    {
    }

    public synchronized void testReindexing() throws Exception
    {
        indexRecoverer.setRecoveryMode(FullIndexRecoveryComponent.RecoveryMode.FULL.name());
        // reindex
        Thread reindexThread = new Thread()
        {
            public void run()
            {
                indexRecoverer.reindex();
            }
        };
        reindexThread.setDaemon(true);
        reindexThread.start();
//        reindexThread.run();

        // wait a bit and then terminate
        wait(10000);
        indexRecoverer.setShutdown(true);
        wait(10000);
    }
}
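The asynchronous pattern above can be distilled into a small helper; a hedged sketch, assuming only the public reindex() and setShutdown(boolean) methods the test already uses (the helper class itself is hypothetical):

import org.alfresco.repo.node.index.FullIndexRecoveryComponent;

public class BoundedRecoveryRunner
{
    public static void runWithTimeout(final FullIndexRecoveryComponent recoverer, long millis)
            throws InterruptedException
    {
        Thread reindexThread = new Thread()
        {
            public void run()
            {
                recoverer.reindex();
            }
        };
        reindexThread.setDaemon(true);
        reindexThread.start();
        // give the recovery some time, then request a cooperative stop
        reindexThread.join(millis);
        recoverer.setShutdown(true);
    }
}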
View File

@ -1,741 +1,141 @@
/*
 * Copyright (C) 2005-2006 Alfresco, Inc.
 *
 * Licensed under the Mozilla Public License version 1.1
 * with a permitted attribution clause. You may obtain a
 * copy of the License at
 *
 *   http://www.alfresco.org/legal/license.txt
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the
 * License.
 */
package org.alfresco.repo.node.index;

import java.util.List;

import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * This component attempts to reindex nodes whose content was missing when they
 * were last indexed.
 *
 * @author Derek Hulley
 */
public class MissingContentReindexComponent extends AbstractReindexComponent
{
    private static Log logger = LogFactory.getLog(MissingContentReindexComponent.class);

    /** keep track of whether the FTS indexer thread has been poked */
    private boolean ftsIndexerCalled;

    public MissingContentReindexComponent()
    {
        ftsIndexerCalled = false;
    }

    /**
     * If this object is currently busy, then it just does nothing
     */
    @Override
    public void reindexImpl()
    {
        List<StoreRef> storeRefs = nodeService.getStores();
        int count = 0;
        for (StoreRef storeRef : storeRefs)
        {
            // prompt the FTS reindexing
            if (!ftsIndexerCalled)
            {
                ftsIndexer.requiresIndex(storeRef);
            }
            // reindex missing content
            count += reindexMissingContent(storeRef);
            // check if we have to break out
            if (isShuttingDown())
            {
                break;
            }
        }

        // The FTS indexer only needs to be prompted once
        ftsIndexerCalled = true;

        // done
        if (logger.isDebugEnabled())
        {
            logger.debug("Missing content indexing touched " + count + " content nodes");
        }
    }

    /**
     * @param storeRef the store to check for missing content
     * @return Returns the number of documents reindexed
     */
    private int reindexMissingContent(StoreRef storeRef)
    {
        SearchParameters sp = new SearchParameters();
        sp.addStore(storeRef);

        // search for it in the index, sorting with youngest first
        sp.setLanguage(SearchService.LANGUAGE_LUCENE);
        sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
        sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
        ResultSet results = null;
        try
        {
            results = searcher.query(sp);

            int count = 0;
            // iterate over the nodes and prompt for reindexing
            for (ResultSetRow row : results)
            {
                final NodeRef childNodeRef = row.getNodeRef();
                // prompt for a reindex - it might fail again, but we just keep plugging away
                TransactionWork<Object> reindexWork = new TransactionWork<Object>()
                {
                    public Object doWork()
                    {
                        indexer.updateNode(childNodeRef);
                        return null;
                    }
                };
                TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork);
                count++;
                // check if we have to break out
                if (isShuttingDown())
                {
                    break;
                }
            }
            // done
            if (logger.isDebugEnabled())
            {
                logger.debug(
                        "Reindexed missing content: \n" +
                        "   store: " + storeRef + "\n" +
                        "   node count: " + count);
            }
            return count;
        }
        finally
        {
            if (results != null)
            {
                results.close();
            }
        }
    }
}
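Both recovery components extend the same base class and supply only the reindexImpl() callback. A hedged sketch of that extension contract, assuming the inherited nodeService field and isShuttingDown() check used by the two components in this commit; the class below is illustrative, not shipped code:

package org.alfresco.repo.node.index;

import java.util.List;

import org.alfresco.service.cmr.repository.StoreRef;

public class NoopReindexComponent extends AbstractReindexComponent
{
    @Override
    protected void reindexImpl()
    {
        List<StoreRef> storeRefs = nodeService.getStores();
        for (StoreRef storeRef : storeRefs)
        {
            // cooperate with VM shutdown, as the shipped components do
            if (isShuttingDown())
            {
                break;
            }
            // a real component would validate and reindex the store here
        }
    }
}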
View File

@ -0,0 +1,172 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
* @see org.alfresco.repo.node.index.MissingContentReindexComponent
*
* @author Derek Hulley
*/
public class MissingContentReindexComponentTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private AuthenticationComponent authenticationComponent;
private SearchService searchService;
private NodeService nodeService;
private FileFolderService fileFolderService;
private ContentStore contentStore;
private FullTextSearchIndexer ftsIndexer;
private NodeRef rootNodeRef;
private MissingContentReindexComponent reindexer;
@Override
protected void setUp() throws Exception
{
ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
searchService = serviceRegistry.getSearchService();
nodeService = serviceRegistry.getNodeService();
fileFolderService = serviceRegistry.getFileFolderService();
authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponentImpl");
contentStore = (ContentStore) ctx.getBean("fileContentStore");
ftsIndexer = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
Indexer indexer = (Indexer) ctx.getBean("indexerComponent");
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
TransactionService transactionService = serviceRegistry.getTransactionService();
reindexer = new MissingContentReindexComponent();
reindexer.setAuthenticationComponent(authenticationComponent);
reindexer.setFtsIndexer(ftsIndexer);
reindexer.setIndexer(indexer);
reindexer.setNodeDaoService(nodeDaoService);
reindexer.setNodeService(nodeService);
reindexer.setSearcher(searchService);
reindexer.setTransactionComponent((TransactionComponent)transactionService);
// authenticate
authenticationComponent.setSystemUserAsCurrentUser();
// create a root node for the test
StoreRef storeRef = nodeService.createStore("test", getName() + "-" + System.nanoTime());
rootNodeRef = nodeService.getRootNode(storeRef);
rootNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("cm:x"),
ContentModel.TYPE_FOLDER).getChildRef();
}
@Override
protected void tearDown() throws Exception
{
authenticationComponent.clearCurrentSecurityContext();
}
/**
* Create a node with a content URL that points to missing content. It then
* checks that the indexing flagged it, prompts a reindex of missing content
* and checks that the text was properly indexed.
*/
public synchronized void testReindex() throws Exception
{
// create a node with missing content
String contentUrl = AbstractContentStore.createNewUrl();
ContentData contentData = new ContentData(contentUrl, "text/plain", 0L, "UTF8");
// create the file node
NodeRef nodeRef = fileFolderService.create(rootNodeRef, "myfile", ContentModel.TYPE_CONTENT).getNodeRef();
// add the content
nodeService.setProperty(nodeRef, ContentModel.PROP_CONTENT, contentData);
// wait a bit for the indexing
ftsIndexer.index();
wait(1000);
// check that the content was but that the content was M.I.A.
SearchParameters sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try
{
results = searchService.query(sp);
assertTrue("Content missing NICM not found", results.length() == 1);
}
finally
{
if (results != null) { results.close(); }
}
// now put some content in the store
ContentWriter writer = contentStore.getWriter(null, contentUrl);
writer.setMimetype("text/plain");
writer.setEncoding("UTF8");
writer.putContent("123abc456def");
// prompt for reindex
reindexer.reindex();
// wait for it to have been indexed again
ftsIndexer.index();
wait(1000);
// search for the text
sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + "123abc456def");
sp.addSort("@" + ContentModel.PROP_CREATED, false);
results = null;
try
{
results = searchService.query(sp);
assertTrue("Indexed content node found", results.length() == 1);
}
finally
{
if (results != null) { results.close(); }
}
}
}

View File

@ -42,6 +42,11 @@ public class ClosingIndexSearcher extends IndexSearcher
this.reader = r; this.reader = r;
} }
/*package*/ IndexReader getReader()
{
return reader;
}
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {

View File

@ -145,7 +145,7 @@ public abstract class LuceneBase2
} }
} }
protected Searcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException protected ClosingIndexSearcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException
{ {
// If we know the delta id we should do better // If we know the delta id we should do better

View File

@ -43,7 +43,6 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware; import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer; import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus; import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.AspectDefinition; import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService; import org.alfresco.service.cmr.dictionary.DictionaryService;
@ -1316,7 +1315,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
if (includeDirectoryDocuments) if (includeDirectoryDocuments)
{ {
if (nodeTypeDef.getChildAssociations().size() > 0) if (nodeTypeDef != null && nodeTypeDef.getChildAssociations().size() > 0)
{ {
if (directPaths.contains(pair.getFirst())) if (directPaths.contains(pair.getFirst()))
{ {

View File

@ -59,13 +59,9 @@ import org.saxpath.SAXPathException;
import com.werken.saxpath.XPathReader; import com.werken.saxpath.XPathReader;
/** /**
* The Lucene implementation of Searcher At the moment we support only lucene * The Lucene implementation of Searcher. At the moment we support only lucene based queries. TODO: Support for other query languages
* based queries.
*
* TODO: Support for other query languages
* *
* @author andyh * @author andyh
*
*/ */
public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2 public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
{ {
@ -90,9 +86,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
*/ */
/** /**
* Get an initialised searcher for the store and transaction Normally we do * Get an initialised searcher for the store and transaction. Normally we do not search against a store and delta. Currently only gets the searcher against the main index.
* not search against a a store and delta. Currently only gets the searcher
* against the main index.
* *
* @param storeRef * @param storeRef
* @param deltaId * @param deltaId
@ -115,9 +109,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
} }
/** /**
* Get an intialised searcher for the store. No transactional ammendsmends * Get an initialised searcher for the store. No transactional amendments are searched.
* are searched.
*
* *
* @param storeRef * @param storeRef
* @return * @return
@ -134,7 +126,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
public boolean indexExists() public boolean indexExists()
{ {
//return mainIndexExists(); // return mainIndexExists();
return true; return true;
} }
@ -220,7 +212,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
Query query = LuceneQueryParser.parse(parameterisedQueryString, DEFAULT_FIELD, new LuceneAnalyser( Query query = LuceneQueryParser.parse(parameterisedQueryString, DEFAULT_FIELD, new LuceneAnalyser(
dictionaryService), namespacePrefixResolver, dictionaryService, defaultOperator); dictionaryService), namespacePrefixResolver, dictionaryService, defaultOperator);
Searcher searcher = getSearcher(indexer); ClosingIndexSearcher searcher = getSearcher(indexer);
if (searcher == null) if (searcher == null)
{ {
// no index return an empty result set // no index return an empty result set
@ -238,7 +230,14 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
switch (sd.getSortType()) switch (sd.getSortType())
{ {
case FIELD: case FIELD:
fields[index++] = new SortField(sd.getField(), !sd.isAscending()); if (searcher.getReader().getFieldNames().contains(sd.getField()))
{
fields[index++] = new SortField(sd.getField(), !sd.isAscending());
}
else
{
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
}
break; break;
case DOCUMENT: case DOCUMENT:
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending()); fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
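For context: sorting on a field that no document in the index carries can fail inside Lucene, so the change above degrades such sorts to document order (the new LuceneTest2 case later in this change sorts on a nonexistent "cabbage" field). A condensed sketch of the same guard, assuming the Lucene 1.4-era Collection-returning IndexReader.getFieldNames() and the package-visible ClosingIndexSearcher.getReader() added in this change:

    // Sketch only: choose a usable SortField, falling back to index order
    // when the requested field is absent from the index.
    // imports assumed: org.apache.lucene.search.SortField
    private SortField createSortField(ClosingIndexSearcher searcher, String field, boolean ascending)
    {
        if (searcher.getReader().getFieldNames().contains(field))
        {
            return new SortField(field, !ascending);
        }
        return new SortField(null, SortField.DOC, !ascending);
    }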
@ -348,8 +347,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
} }
/** /**
* The definitions must provide a default value, or of not there must be a * The definitions must provide a default value, or if not there must be a parameter to provide the value
* parameter to provide the value
* *
* @param definition * @param definition
* @param queryParameters * @param queryParameters
@ -396,12 +394,9 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
} }
/* /*
* Parameterise the query string - not sure if it is required to escape * Parameterise the query string - not sure if it is required to escape lucene special chars. The parameters could be used to build the query - the contents of parameters
* lucence spacials chars The parameters could be used to build the query - * should already have been escaped if required. ... much better to provide the parameters and work out what to do TODO: conditional query escapement - maybe we should have a
* the contents of parameters should alread have been escaped if required. * parameter type that is not escaped
* ... mush better to provide the parameters and work out what to do TODO:
* conditional query escapement - may be we should have a parameter type
* that is not escaped
*/ */
private String parameterise(String unparameterised, Map<QName, QueryParameterDefinition> map, private String parameterise(String unparameterised, Map<QName, QueryParameterDefinition> map,
QueryParameter[] queryParameters, NamespacePrefixResolver nspr) throws QueryParameterisationException QueryParameter[] queryParameters, NamespacePrefixResolver nspr) throws QueryParameterisationException
@ -567,7 +562,6 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
/** /**
* @return Returns true if the pattern is present, otherwise false. * @return Returns true if the pattern is present, otherwise false.
*
* @see #setIndexer(Indexer) * @see #setIndexer(Indexer)
* @see #setSearcher(SearchService) * @see #setSearcher(SearchService)
*/ */

View File

@ -381,6 +381,10 @@ public class LuceneTest2 extends TestCase
super(arg0); super(arg0);
} }
public void firstTest() throws Exception
{
testSort();
}
public void test0() throws Exception public void test0() throws Exception
{ {
@ -1063,6 +1067,17 @@ public class LuceneTest2 extends TestCase
results.close(); results.close();
luceneFTS.resume(); luceneFTS.resume();
SearchParameters sp17 = new SearchParameters();
sp17.addStore(rootNodeRef.getStoreRef());
sp17.setLanguage(SearchService.LANGUAGE_LUCENE);
sp17.setQuery("PATH:\"//.\"");
sp17.addSort("cabbage", false);
results = searcher.query(sp17);
results.close();
luceneFTS.resume();
} }
public void test1() throws Exception public void test1() throws Exception

View File

@ -531,6 +531,9 @@ public class NTLMAuthenticationComponentImpl extends AbstractAuthenticationCompo
authSess = m_passthruServers.openSession(); authSess = m_passthruServers.openSession();
if ( authSess == null)
throw new AuthenticationException("Failed to open session to passthru server");
// Authenticate using the credentials supplied // Authenticate using the credentials supplied
authenticateLocal(ntlmToken, authSess); authenticateLocal(ntlmToken, authSess);
@ -748,6 +751,11 @@ public class NTLMAuthenticationComponentImpl extends AbstractAuthenticationCompo
authSess = m_passthruServers.openSession(); authSess = m_passthruServers.openSession();
// Check if the session was opened to the passthru server
if ( authSess == null)
throw new AuthenticationServiceException("Failed to open passthru auth session");
ntlmToken.setAuthenticationExpireTime(System.currentTimeMillis() + getSessionTimeout()); ntlmToken.setAuthenticationExpireTime(System.currentTimeMillis() + getSessionTimeout());
// Get the challenge from the initial session negotiate stage // Get the challenge from the initial session negotiate stage

View File

@ -26,30 +26,34 @@ import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
/**
* LockOwnerDynamicAuthority
*/
public class LockOwnerDynamicAuthority implements DynamicAuthority, InitializingBean public class LockOwnerDynamicAuthority implements DynamicAuthority, InitializingBean
{ {
private LockService lockService; private LockService lockService;
private NodeService nodeService; private NodeService nodeService;
public LockOwnerDynamicAuthority()
{
super();
}
public boolean hasAuthority(NodeRef nodeRef, String userName) public boolean hasAuthority(NodeRef nodeRef, String userName)
{ {
if(lockService.getLockStatus(nodeRef) == LockStatus.LOCK_OWNER) if (lockService.getLockStatus(nodeRef) == LockStatus.LOCK_OWNER)
{ {
return true; return true;
} }
if(nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY)) if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY))
{ {
NodeRef originial = DefaultTypeConverter.INSTANCE.convert(NodeRef.class, nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE)); NodeRef original = DefaultTypeConverter.INSTANCE.convert(
return (lockService.getLockStatus(originial) == LockStatus.LOCK_OWNER); NodeRef.class, nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE));
if (nodeService.exists(original))
{
return (lockService.getLockStatus(original) == LockStatus.LOCK_OWNER);
}
else
{
return false;
}
} }
else else
{ {
@ -80,12 +84,8 @@ public class LockOwnerDynamicAuthority implements DynamicAuthority, Initializing
this.lockService = lockService; this.lockService = lockService;
} }
public void setNodeService(NodeService nodeService) public void setNodeService(NodeService nodeService)
{ {
this.nodeService = nodeService; this.nodeService = nodeService;
} }
} }

View File

@ -43,6 +43,7 @@ import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.AuthorityService; import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.OwnableService; import org.alfresco.service.cmr.security.OwnableService;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.version.VersionService; import org.alfresco.service.cmr.version.VersionService;
import org.alfresco.service.cmr.view.ExporterService; import org.alfresco.service.cmr.view.ExporterService;
import org.alfresco.service.cmr.view.ImporterService; import org.alfresco.service.cmr.view.ImporterService;
@ -341,5 +342,12 @@ public class ServiceDescriptorRegistry
return (OwnableService)getService(OWNABLE_SERVICE); return (OwnableService)getService(OWNABLE_SERVICE);
} }
/* (non-Javadoc)
* @see org.alfresco.service.ServiceRegistry#getPersonService()
*/
public PersonService getPersonService()
{
return (PersonService)getService(PERSON_SERVICE);
}
} }

View File

@ -45,6 +45,10 @@ public class DummyTransactionService implements TransactionService
return false; return false;
} }
public void setReadOnly(boolean readOnly)
{
}
public UserTransaction getUserTransaction() public UserTransaction getUserTransaction()
{ {
return txn; return txn;

View File

@ -60,6 +60,7 @@ public interface WorkflowModel
// workflow package // workflow package
static final QName ASPECT_WORKFLOW_PACKAGE = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowPackage"); static final QName ASPECT_WORKFLOW_PACKAGE = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowPackage");
static final QName PROP_IS_SYSTEM_PACKAGE = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "isSystemPackage");
static final QName PROP_WORKFLOW_DEFINITION_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionId"); static final QName PROP_WORKFLOW_DEFINITION_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionId");
static final QName PROP_WORKFLOW_DEFINITION_NAME = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionName"); static final QName PROP_WORKFLOW_DEFINITION_NAME = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionName");
static final QName PROP_WORKFLOW_INSTANCE_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowInstanceId"); static final QName PROP_WORKFLOW_INSTANCE_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowInstanceId");

View File

@ -40,6 +40,16 @@ public interface WorkflowPackageComponent
*/ */
public NodeRef createPackage(NodeRef container); public NodeRef createPackage(NodeRef container);
/**
* Deletes a Workflow Package
*
* The workflow package aspect is removed, and if the container was previously created by the workflow
* service (i.e. not provided from elsewhere), it will be deleted.
*
* @param container
*/
public void deletePackage(NodeRef container);
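A hedged usage sketch of that contract (component wiring assumed; myFolder is hypothetical): a container the component created itself is deleted outright, while an externally supplied container only loses the workflow package aspect.

    // sketch: package created by the component => container is deleted on deletePackage
    NodeRef created = workflowPackageComponent.createPackage(null);
    workflowPackageComponent.deletePackage(created);

    // sketch: externally supplied container => only the aspect is removed
    NodeRef supplied = workflowPackageComponent.createPackage(myFolder);
    workflowPackageComponent.deletePackage(supplied);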
// TODO: Further support for finding packages via meta-data of WorkflowPackage aspect // TODO: Further support for finding packages via meta-data of WorkflowPackage aspect
/** /**

View File

@ -89,6 +89,7 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
public NodeRef createPackage(NodeRef container) public NodeRef createPackage(NodeRef container)
{ {
// create a container, if one is not specified // create a container, if one is not specified
boolean isSystemPackage = false;
if (container == null) if (container == null)
{ {
// create simple folder in workflow system folder // create simple folder in workflow system folder
@ -112,6 +113,7 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
QName qname = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, containerName); QName qname = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, containerName);
ChildAssociationRef childRef = nodeService.createNode(packages, ContentModel.ASSOC_CONTAINS, qname, ContentModel.TYPE_SYSTEM_FOLDER); ChildAssociationRef childRef = nodeService.createNode(packages, ContentModel.ASSOC_CONTAINS, qname, ContentModel.TYPE_SYSTEM_FOLDER);
container = childRef.getChildRef(); container = childRef.getChildRef();
isSystemPackage = true;
} }
// attach workflow package // attach workflow package
@ -120,11 +122,31 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
throw new WorkflowException("Container '" + container + "' is already a workflow package."); throw new WorkflowException("Container '" + container + "' is already a workflow package.");
} }
nodeService.addAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE, null); nodeService.addAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE, null);
nodeService.setProperty(container, WorkflowModel.PROP_IS_SYSTEM_PACKAGE, isSystemPackage);
// return container // return container
return container; return container;
} }
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowPackageComponent#deletePackage(org.alfresco.service.cmr.repository.NodeRef)
*/
public void deletePackage(NodeRef container)
{
if (container != null && nodeService.exists(container) && nodeService.hasAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE))
{
Boolean isSystemPackage = (Boolean)nodeService.getProperty(container, WorkflowModel.PROP_IS_SYSTEM_PACKAGE);
if (isSystemPackage != null && isSystemPackage.booleanValue())
{
nodeService.deleteNode(container);
}
else
{
nodeService.removeAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE);
}
}
}
/* (non-Javadoc) /* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowPackageComponent#getWorkflowIdsForContent(org.alfresco.service.cmr.repository.NodeRef, boolean) * @see org.alfresco.repo.workflow.WorkflowPackageComponent#getWorkflowIdsForContent(org.alfresco.service.cmr.repository.NodeRef, boolean)
*/ */
@ -151,7 +173,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
return workflowIds; return workflowIds;
} }
/** /**
* Gets the system workflow container for storing workflow related items * Gets the system workflow container for storing workflow related items
* *
@ -171,7 +192,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
return systemWorkflowContainer; return systemWorkflowContainer;
} }
/** /**
* Finds the system workflow container * Finds the system workflow container
* *
@ -193,7 +213,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
return systemWorkflowContainer; return systemWorkflowContainer;
} }
/** /**
* Finds the system container * Finds the system container
* *
@ -215,7 +234,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
return nodeRefs.get(0); return nodeRefs.get(0);
} }
/** /**
* Creates the System Workflow Container * Creates the System Workflow Container
* *

View File

@ -210,6 +210,8 @@ public class WorkflowServiceImpl implements WorkflowService
*/ */
public WorkflowInstance cancelWorkflow(String workflowId) public WorkflowInstance cancelWorkflow(String workflowId)
{ {
WorkflowInstance instance = getWorkflowById(workflowId);
workflowPackageComponent.deletePackage(instance.workflowPackage);
String engineId = BPMEngineRegistry.getEngineId(workflowId); String engineId = BPMEngineRegistry.getEngineId(workflowId);
WorkflowComponent component = getWorkflowComponent(engineId); WorkflowComponent component = getWorkflowComponent(engineId);
return component.cancelWorkflow(workflowId); return component.cancelWorkflow(workflowId);
@ -319,7 +321,7 @@ public class WorkflowServiceImpl implements WorkflowService
String engineId = BPMEngineRegistry.getEngineId(workflowId); String engineId = BPMEngineRegistry.getEngineId(workflowId);
WorkflowComponent component = getWorkflowComponent(engineId); WorkflowComponent component = getWorkflowComponent(engineId);
WorkflowInstance instance = component.getWorkflowById(workflowId); WorkflowInstance instance = component.getWorkflowById(workflowId);
if (instance.active == active) if (instance != null && instance.active == active)
{ {
workflowInstances.add(instance); workflowInstances.add(instance);
} }

View File

@ -0,0 +1,186 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow.jbpm;
import java.util.List;
import junit.framework.TestCase;
import org.jbpm.JbpmConfiguration;
import org.jbpm.JbpmContext;
import org.jbpm.db.GraphSession;
import org.jbpm.db.TaskMgmtSession;
import org.jbpm.graph.def.ProcessDefinition;
import org.jbpm.graph.exe.ProcessInstance;
import org.jbpm.graph.exe.Token;
import org.jbpm.taskmgmt.exe.TaskInstance;
/**
* Unit Test for reproducing constraint violation during JBPM process deletion
*
* http://jira.jboss.com/jira/browse/JBPM-757
*
* @author davidc
*/
public class JBPMDeleteProcessTest extends TestCase {
static JbpmConfiguration jbpmConfiguration = null;
static long processId = -1L;
static String currentTokenPath = null;
static {
jbpmConfiguration = JbpmConfiguration.parseXmlString(
"<jbpm-configuration>" +
" <jbpm-context>" +
" <service name='persistence' " +
" factory='org.jbpm.persistence.db.DbPersistenceServiceFactory' />" +
" </jbpm-context>" +
" <string name='resource.hibernate.cfg.xml' " +
" value='jbpmresources/hibernate.cfg.xml' />" +
" <string name='resource.business.calendar' " +
" value='org/jbpm/calendar/jbpm.business.calendar.properties' />" +
" <string name='resource.default.modules' " +
" value='org/jbpm/graph/def/jbpm.default.modules.properties' />" +
" <string name='resource.converter' " +
" value='org/jbpm/db/hibernate/jbpm.converter.properties' />" +
" <string name='resource.action.types' " +
" value='org/jbpm/graph/action/action.types.xml' />" +
" <string name='resource.node.types' " +
" value='org/jbpm/graph/node/node.types.xml' />" +
" <string name='resource.varmapping' " +
" value='org/jbpm/context/exe/jbpm.varmapping.xml' />" +
"</jbpm-configuration>"
);
}
public void setUp() {
jbpmConfiguration.createSchema();
}
public void tearDown() {
jbpmConfiguration.dropSchema();
}
public void testDelete() {
deployProcessDefinition();
startProcess();
step2TaskEnd();
deleteProcess();
}
public void deployProcessDefinition() {
ProcessDefinition processDefinition = ProcessDefinition.parseXmlString
(
"<process-definition name='deletetest'>" +
" <start-state name='start'> " +
" <task name='startTask'> " +
" <controller> " +
" <variable name='var1' access='write'/> " +
" </controller> " +
" </task> " +
" <transition name='' to='step2'/> " +
" </start-state> " +
" <task-node name='step2'> " +
" <task name='step2Task'/> " +
" <transition name='' to='step3'/> " +
" </task-node>" +
" <task-node name='step3'> " +
" <task name='step3Task'/> " +
" <transition name='' to='end'/> " +
" </task-node> " +
" <end-state name='end' />" +
"</process-definition>"
);
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
jbpmContext.deployProcessDefinition(processDefinition);
} finally {
jbpmContext.close();
}
}
public void startProcess() {
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessDefinition processDefinition = graphSession.findLatestProcessDefinition("deletetest");
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processId = processInstance.getId();
TaskInstance taskInstance = processInstance.getTaskMgmtInstance().createStartTaskInstance();
taskInstance.setVariableLocally("var1", "var1Value");
taskInstance.end();
Token token = taskInstance.getToken();
currentTokenPath = token.getFullName();
jbpmContext.save(processInstance);
} finally {
jbpmContext.close();
}
}
public void step2TaskEnd() {
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(processId);
Token token = processInstance.findToken(currentTokenPath);
TaskMgmtSession taskSession = jbpmContext.getTaskMgmtSession();
List tasks = taskSession.findTaskInstancesByToken(token.getId());
TaskInstance taskInstance = (TaskInstance)tasks.get(0);
//
// Uncomment the following line to force constraint violation
//
// taskInstance.setVariableLocally("var1", "var1TaskValue");
taskInstance.setVariableLocally("var2", "var2UpdatedValue");
taskInstance.end();
token = taskInstance.getToken();
currentTokenPath = token.getFullName();
jbpmContext.save(processInstance);
} finally {
jbpmContext.close();
}
}
public void deleteProcess()
{
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(processId);
graphSession.deleteProcessInstance(processInstance, true, true, true);
} finally {
jbpmContext.close();
}
}
}

View File

@ -59,6 +59,7 @@ import org.alfresco.service.cmr.workflow.WorkflowTask;
import org.alfresco.service.cmr.workflow.WorkflowTaskDefinition; import org.alfresco.service.cmr.workflow.WorkflowTaskDefinition;
import org.alfresco.service.cmr.workflow.WorkflowTaskState; import org.alfresco.service.cmr.workflow.WorkflowTaskState;
import org.alfresco.service.cmr.workflow.WorkflowTransition; import org.alfresco.service.cmr.workflow.WorkflowTransition;
import org.alfresco.service.namespace.NamespaceException;
import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName; import org.alfresco.service.namespace.QName;
import org.hibernate.Query; import org.hibernate.Query;
@ -67,7 +68,6 @@ import org.hibernate.proxy.HibernateProxy;
import org.jbpm.JbpmContext; import org.jbpm.JbpmContext;
import org.jbpm.JbpmException; import org.jbpm.JbpmException;
import org.jbpm.context.exe.ContextInstance; import org.jbpm.context.exe.ContextInstance;
import org.jbpm.context.exe.TokenVariableMap;
import org.jbpm.db.GraphSession; import org.jbpm.db.GraphSession;
import org.jbpm.db.TaskMgmtSession; import org.jbpm.db.TaskMgmtSession;
import org.jbpm.graph.def.Node; import org.jbpm.graph.def.Node;
@ -260,8 +260,7 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve process definition // retrieve process definition
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.loadProcessDefinition(getJbpmId(workflowDefinitionId)); ProcessDefinition processDefinition = getProcessDefinition(graphSession, workflowDefinitionId);
// NOTE: if not found, should throw an exception
// undeploy // undeploy
// NOTE: jBPM deletes all "in-flight" processes too // NOTE: jBPM deletes all "in-flight" processes too
@ -332,7 +331,7 @@ public class JBPMEngine extends BPMEngine
{ {
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDef = graphSession.findLatestProcessDefinition(createLocalId(workflowName)); ProcessDefinition processDef = graphSession.findLatestProcessDefinition(createLocalId(workflowName));
return createWorkflowDefinition(processDef); return processDef == null ? null : createWorkflowDefinition(processDef);
} }
}); });
} }
@ -342,6 +341,23 @@ public class JBPMEngine extends BPMEngine
} }
} }
/**
* Gets a jBPM process definition
*
* @param graphSession jBPM graph session
* @param workflowDefinitionId workflow definition id
* @return process definition
*/
private ProcessDefinition getProcessDefinition(GraphSession graphSession, String workflowDefinitionId)
{
ProcessDefinition processDefinition = graphSession.getProcessDefinition(getJbpmId(workflowDefinitionId));
if (processDefinition == null)
{
throw new WorkflowException("Workflow definition '" + workflowDefinitionId + "' does not exist");
}
return processDefinition;
}
// //
// Workflow Instance Management... // Workflow Instance Management...
@ -367,7 +383,7 @@ public class JBPMEngine extends BPMEngine
// construct a new process // construct a new process
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.loadProcessDefinition(getJbpmId(workflowDefinitionId)); ProcessDefinition processDefinition = getProcessDefinition(graphSession, workflowDefinitionId);
ProcessInstance processInstance = new ProcessInstance(processDefinition); ProcessInstance processInstance = new ProcessInstance(processDefinition);
// assign initial process context // assign initial process context
@ -447,7 +463,7 @@ public class JBPMEngine extends BPMEngine
// retrieve workflow // retrieve workflow
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.getProcessInstance(getJbpmId(workflowId)); ProcessInstance processInstance = graphSession.getProcessInstance(getJbpmId(workflowId));
return createWorkflowInstance(processInstance); return processInstance == null ? null : createWorkflowInstance(processInstance);
} }
}); });
} }
@ -457,6 +473,22 @@ public class JBPMEngine extends BPMEngine
} }
} }
/**
* Gets a jBPM Process Instance
* @param graphSession jBPM graph session
* @param workflowId workflow id
* @return process instance
*/
private ProcessInstance getProcessInstance(GraphSession graphSession, String workflowId)
{
ProcessInstance processInstance = graphSession.getProcessInstance(getJbpmId(workflowId));
if (processInstance == null)
{
throw new WorkflowException("Workflow instance '" + workflowId + "' does not exist");
}
return processInstance;
}
/* (non-Javadoc) /* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowComponent#getWorkflowPaths(java.lang.String) * @see org.alfresco.repo.workflow.WorkflowComponent#getWorkflowPaths(java.lang.String)
*/ */
@ -471,7 +503,7 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve process instance // retrieve process instance
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(getJbpmId(workflowId)); ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
// convert jBPM tokens to workflow positions // convert jBPM tokens to workflow positions
List<Token> tokens = processInstance.findAllTokens(); List<Token> tokens = processInstance.findAllTokens();
@ -509,38 +541,11 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve and cancel process instance // retrieve and cancel process instance
GraphSession graphSession = context.getGraphSession(); GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(getJbpmId(workflowId)); ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
// TODO: Determine if this is the most appropriate way to cancel workflow... // TODO: Determine if this is the most appropriate way to cancel workflow...
// It might be useful to record the point at which it was cancelled etc // It might be useful to record the point at which it was cancelled etc
WorkflowInstance workflowInstance = createWorkflowInstance(processInstance); WorkflowInstance workflowInstance = createWorkflowInstance(processInstance);
//
// TODO: remove - workaround for JBPM variable mapping constraint exception
//
Collection<TaskInstance> tasks = processInstance.getTaskMgmtInstance().getTaskInstances();
for (TaskInstance task : tasks)
{
Map<String, Serializable> taskVariables = task.getVariablesLocally();
for (String varName : taskVariables.keySet())
{
task.deleteVariableLocally(varName);
}
}
ContextInstance processContext = processInstance.getContextInstance();
Map<Token, TokenVariableMap> tokenVarMaps = processContext.getTokenVariableMaps();
for (Map.Entry<Token, TokenVariableMap> mapEntry : tokenVarMaps.entrySet())
{
TokenVariableMap tokenVarMap = mapEntry.getValue();
Map<String, Serializable> variables = tokenVarMap.getVariables();
for (String name : variables.keySet())
{
tokenVarMap.deleteVariable(name);
}
}
//
// end TODO
//
// delete the process instance // delete the process instance
graphSession.deleteProcessInstance(processInstance, true, true, true); graphSession.deleteProcessInstance(processInstance, true, true, true);
workflowInstance.active = false; workflowInstance.active = false;
@ -738,6 +743,22 @@ public class JBPMEngine extends BPMEngine
} }
} }
/**
* Gets a jBPM Task Instance
* @param taskSession jBPM task session
* @param taskId task id
* @return task instance
*/
private TaskInstance getTaskInstance(TaskMgmtSession taskSession, String taskId)
{
TaskInstance taskInstance = taskSession.getTaskInstance(getJbpmId(taskId));
if (taskInstance == null)
{
throw new WorkflowException("Task instance '" + taskId + "' does not exist");
}
return taskInstance;
}
/* (non-Javadoc) /* (non-Javadoc)
* @see org.alfresco.repo.workflow.TaskComponent#updateTask(java.lang.String, java.util.Map, java.util.Map, java.util.Map) * @see org.alfresco.repo.workflow.TaskComponent#updateTask(java.lang.String, java.util.Map, java.util.Map, java.util.Map)
*/ */
@ -751,7 +772,7 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve task // retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession(); TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId)); TaskInstance taskInstance = getTaskInstance(taskSession, taskId);
// create properties to set on task instance // create properties to set on task instance
Map<QName, Serializable> newProperties = properties; Map<QName, Serializable> newProperties = properties;
@ -871,12 +892,7 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve task // retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession(); TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId)); TaskInstance taskInstance = getTaskInstance(taskSession, taskId);
// set status to complete
Map<QName, Serializable> taskProperties = new HashMap<QName, Serializable>();
taskProperties.put(WorkflowModel.PROP_STATUS, "Completed");
setTaskProperties(taskInstance, taskProperties);
// signal the transition on the task // signal the transition on the task
if (transition == null) if (transition == null)
@ -922,8 +938,8 @@ public class JBPMEngine extends BPMEngine
{ {
// retrieve task // retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession(); TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId)); TaskInstance taskInstance = taskSession.getTaskInstance(getJbpmId(taskId));
return createWorkflowTask(taskInstance); return taskInstance == null ? null : createWorkflowTask(taskInstance);
} }
}); });
} }
@ -1134,7 +1150,7 @@ public class JBPMEngine extends BPMEngine
} }
// retrieve jBPM token for workflow position // retrieve jBPM token for workflow position
ProcessInstance processInstance = session.loadProcessInstance(getJbpmId(path[0])); ProcessInstance processInstance = getProcessInstance(session, path[0]);
String tokenId = path[1].replace(WORKFLOW_TOKEN_SEPERATOR, "/"); String tokenId = path[1].replace(WORKFLOW_TOKEN_SEPERATOR, "/");
Token token = processInstance.findToken(tokenId); Token token = processInstance.findToken(tokenId);
if (token == null) if (token == null)
@ -1166,8 +1182,7 @@ public class JBPMEngine extends BPMEngine
for (Entry<String, Object> entry : vars.entrySet()) for (Entry<String, Object> entry : vars.entrySet())
{ {
String key = entry.getKey(); String key = entry.getKey();
String name = key.replace("_", ":"); QName qname = mapNameToQName(key);
QName qname = QName.createQName(name, this.namespaceService);
// add variable, only if part of task definition or locally defined on task // add variable, only if part of task definition or locally defined on task
if (taskProperties.containsKey(qname) || taskAssocs.containsKey(qname) || instance.hasVariableLocally(key)) if (taskProperties.containsKey(qname) || taskAssocs.containsKey(qname) || instance.hasVariableLocally(key))
@ -1356,8 +1371,7 @@ public class JBPMEngine extends BPMEngine
else if (key.equals(WorkflowModel.ASSOC_PACKAGE)) else if (key.equals(WorkflowModel.ASSOC_PACKAGE))
{ {
// Attach workflow definition & instance id to Workflow Package in Repository // Attach workflow definition & instance id to Workflow Package in Repository
String name = key.toPrefixString(this.namespaceService); String name = mapQNameToName(key);
name = name.replace(':', '_');
JBPMNode existingWorkflowPackage = (JBPMNode)instance.getVariable(name); JBPMNode existingWorkflowPackage = (JBPMNode)instance.getVariable(name);
// first check if provided workflow package has already been associated with another workflow instance // first check if provided workflow package has already been associated with another workflow instance
@ -1401,8 +1415,7 @@ public class JBPMEngine extends BPMEngine
// no specific mapping to jBPM task has been established, so place into // no specific mapping to jBPM task has been established, so place into
// the generic task variable bag // the generic task variable bag
String name = key.toPrefixString(this.namespaceService); String name = mapQNameToName(key);
name = name.replace(':', '_');
instance.setVariableLocally(name, value); instance.setVariableLocally(name, value);
} }
} }
@ -1414,7 +1427,7 @@ public class JBPMEngine extends BPMEngine
*/ */
protected void setDefaultTaskProperties(TaskInstance instance) protected void setDefaultTaskProperties(TaskInstance instance)
{ {
Map<QName, Serializable> existingValues = null; Map<QName, Serializable> existingValues = getTaskProperties(instance, true);
Map<QName, Serializable> defaultValues = new HashMap<QName, Serializable>(); Map<QName, Serializable> defaultValues = new HashMap<QName, Serializable>();
// construct an anonymous type that flattens all mandatory aspects // construct an anonymous type that flattens all mandatory aspects
@ -1427,10 +1440,6 @@ public class JBPMEngine extends BPMEngine
String defaultValue = entry.getValue().getDefaultValue(); String defaultValue = entry.getValue().getDefaultValue();
if (defaultValue != null) if (defaultValue != null)
{ {
if (existingValues == null)
{
existingValues = getTaskProperties(instance, true);
}
if (existingValues.get(entry.getKey()) == null) if (existingValues.get(entry.getKey()) == null)
{ {
defaultValues.put(entry.getKey(), defaultValue); defaultValues.put(entry.getKey(), defaultValue);
@ -1442,7 +1451,7 @@ public class JBPMEngine extends BPMEngine
String description = (String)existingValues.get(WorkflowModel.PROP_DESCRIPTION); String description = (String)existingValues.get(WorkflowModel.PROP_DESCRIPTION);
if (description == null || description.length() == 0) if (description == null || description.length() == 0)
{ {
description = (String)instance.getContextInstance().getVariable("bpm_workflowDescription"); description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
if (description != null && description.length() > 0) if (description != null && description.length() > 0)
{ {
defaultValues.put(WorkflowModel.PROP_DESCRIPTION, description); defaultValues.put(WorkflowModel.PROP_DESCRIPTION, description);
@ -1462,16 +1471,62 @@ public class JBPMEngine extends BPMEngine
} }
/** /**
* Set Task Outcome based on specified Transition * Sets default description for the Task
* *
* @param instance task instance * @param instance task instance
* @param transition transition
*/ */
protected void setTaskOutcome(TaskInstance instance, Transition transition) public void setDefaultStartTaskDescription(TaskInstance instance)
{ {
Map<QName, Serializable> outcome = new HashMap<QName, Serializable>(); String description = instance.getTask().getDescription();
outcome.put(WorkflowModel.PROP_OUTCOME, transition.getName()); if (description == null || description.length() == 0)
setTaskProperties(instance, outcome); {
description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
if (description != null && description.length() > 0)
{
Map<QName, Serializable> defaultValues = new HashMap<QName, Serializable>();
defaultValues.put(WorkflowModel.PROP_DESCRIPTION, description);
setTaskProperties(instance, defaultValues);
}
}
}
/**
* Initialise Workflow Instance properties
*
* @param startTask start task instance
*/
protected void setDefaultWorkflowProperties(TaskInstance startTask)
{
Map<QName, Serializable> taskProperties = getTaskProperties(startTask, true);
ContextInstance processContext = startTask.getContextInstance();
String workflowDescriptionName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION);
if (!processContext.hasVariable(workflowDescriptionName))
{
processContext.setVariable(workflowDescriptionName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
}
String workflowDueDateName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_DUE_DATE);
if (!processContext.hasVariable(workflowDueDateName))
{
processContext.setVariable(workflowDueDateName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_DUE_DATE));
}
String workflowPriorityName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_PRIORITY);
if (!processContext.hasVariable(workflowPriorityName))
{
processContext.setVariable(workflowPriorityName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_PRIORITY));
}
String workflowPackageName = mapQNameToName(WorkflowModel.ASSOC_PACKAGE);
if (!processContext.hasVariable(workflowPackageName))
{
Serializable packageNodeRef = taskProperties.get(WorkflowModel.ASSOC_PACKAGE);
processContext.setVariable(workflowPackageName, convertNodeRefs(packageNodeRef instanceof List, packageNodeRef));
}
String workflowContextName = mapQNameToName(WorkflowModel.PROP_CONTEXT);
if (!processContext.hasVariable(workflowContextName))
{
Serializable contextRef = taskProperties.get(WorkflowModel.PROP_CONTEXT);
processContext.setVariable(workflowContextName, convertNodeRefs(contextRef instanceof List, contextRef));
}
} }
/** /**
@ -1538,6 +1593,39 @@ public class JBPMEngine extends BPMEngine
return authority; return authority;
} }
/**
* Map jBPM variable name to QName
*
* @param name jBPM variable name
* @return qname
*/
private QName mapNameToQName(String name)
{
QName qname = null;
String qnameStr = name.replaceFirst("_", ":");
try
{
qname = QName.createQName(qnameStr, this.namespaceService);
}
catch(NamespaceException e)
{
qname = QName.createQName(name, this.namespaceService);
}
return qname;
}
/**
* Map QName to jBPM variable name
*
* @param name QName
* @return jBPM variable name
*/
private String mapQNameToName(QName name)
{
String nameStr = name.toPrefixString(this.namespaceService);
return nameStr.replace(':', '_');
}
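An illustrative round trip through the two mappings above (prefix resolution via the registered namespace service assumed; the variable name matches the "bpm_workflowDescription" usage elsewhere in this change):

    // illustration only: QNames are carried as underscore-separated jBPM variable names
    String varName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION); // "bpm_workflowDescription"
    QName qname = mapNameToQName(varName);                                    // back to bpm:workflowDescription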
/** /**
* Get an I18N Label for a workflow item * Get an I18N Label for a workflow item
* *
@ -1651,6 +1739,7 @@ public class JBPMEngine extends BPMEngine
{ {
WorkflowInstance workflowInstance = new WorkflowInstance(); WorkflowInstance workflowInstance = new WorkflowInstance();
workflowInstance.id = createGlobalId(new Long(instance.getId()).toString()); workflowInstance.id = createGlobalId(new Long(instance.getId()).toString());
workflowInstance.description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
workflowInstance.definition = createWorkflowDefinition(instance.getProcessDefinition()); workflowInstance.definition = createWorkflowDefinition(instance.getProcessDefinition());
workflowInstance.active = !instance.hasEnded(); workflowInstance.active = !instance.hasEnded();
JBPMNode initiator = (JBPMNode)instance.getContextInstance().getVariable("initiator"); JBPMNode initiator = (JBPMNode)instance.getContextInstance().getVariable("initiator");
@ -1658,6 +1747,16 @@ public class JBPMEngine extends BPMEngine
{ {
workflowInstance.initiator = initiator.getNodeRef(); workflowInstance.initiator = initiator.getNodeRef();
} }
JBPMNode context = (JBPMNode)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_CONTEXT));
if (context != null)
{
workflowInstance.context = context.getNodeRef();
}
JBPMNode workflowPackage = (JBPMNode)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.ASSOC_PACKAGE));
if (workflowPackage != null)
{
workflowInstance.workflowPackage = workflowPackage.getNodeRef();
}
workflowInstance.startDate = instance.getStart(); workflowInstance.startDate = instance.getStart();
workflowInstance.endDate = instance.getEnd(); workflowInstance.endDate = instance.getEnd();
return workflowInstance; return workflowInstance;

View File

@ -100,6 +100,7 @@ public class ReviewAndApproveTest extends BaseSpringTest
params.put(WorkflowModel.PROP_WORKFLOW_DUE_DATE, reviewDueDate); params.put(WorkflowModel.PROP_WORKFLOW_DUE_DATE, reviewDueDate);
NodeRef reviewer = personService.getPerson("admin"); NodeRef reviewer = personService.getPerson("admin");
params.put(WorkflowModel.ASSOC_ASSIGNEE, reviewer); params.put(WorkflowModel.ASSOC_ASSIGNEE, reviewer);
params.put(WorkflowModel.PROP_WORKFLOW_DESCRIPTION, "Test review");
WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, params); WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, params);
assertNotNull(path); assertNotNull(path);
@ -113,6 +114,8 @@ public class ReviewAndApproveTest extends BaseSpringTest
assertNotNull(endedTask); assertNotNull(endedTask);
assertTrue(endedTask.properties.containsKey(WorkflowModel.PROP_OUTCOME)); assertTrue(endedTask.properties.containsKey(WorkflowModel.PROP_OUTCOME));
assertEquals("", endedTask.properties.get(WorkflowModel.PROP_OUTCOME)); assertEquals("", endedTask.properties.get(WorkflowModel.PROP_OUTCOME));
assertEquals("Test review", endedTask.properties.get(WorkflowModel.PROP_DESCRIPTION));
assertEquals("Test review", endedTask.path.instance.description);
List<WorkflowTask> assignedTasks = taskComponent.getAssignedTasks("admin", WorkflowTaskState.IN_PROGRESS); List<WorkflowTask> assignedTasks = taskComponent.getAssignedTasks("admin", WorkflowTaskState.IN_PROGRESS);
assertNotNull(assignedTasks); assertNotNull(assignedTasks);

View File

@ -16,7 +16,13 @@
*/ */
package org.alfresco.repo.workflow.jbpm; package org.alfresco.repo.workflow.jbpm;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.repo.workflow.WorkflowModel;
import org.alfresco.service.cmr.workflow.WorkflowException; import org.alfresco.service.cmr.workflow.WorkflowException;
import org.alfresco.service.namespace.QName;
import org.jbpm.graph.def.Transition; import org.jbpm.graph.def.Transition;
import org.jbpm.graph.exe.ExecutionContext; import org.jbpm.graph.exe.ExecutionContext;
import org.jbpm.taskmgmt.exe.TaskInstance; import org.jbpm.taskmgmt.exe.TaskInstance;
@ -96,14 +102,30 @@ public class WorkflowTaskInstance extends TaskInstance
@Override @Override
public void end(Transition transition) public void end(Transition transition)
{ {
// NOTE: Set the outcome first, so it's available during the submission of // Set task properties on completion of task
// NOTE: Set properties first, so they're available during the submission of
// task variables to the process context // task variables to the process context
Map<QName, Serializable> taskProperties = new HashMap<QName, Serializable>();
Transition outcome = (transition == null) ? token.getNode().getDefaultLeavingTransition() : transition; Transition outcome = (transition == null) ? token.getNode().getDefaultLeavingTransition() : transition;
if (outcome != null) if (outcome != null)
{ {
getJBPMEngine().setTaskOutcome(this, outcome); taskProperties.put(WorkflowModel.PROP_OUTCOME, outcome.getName());
} }
taskProperties.put(WorkflowModel.PROP_STATUS, "Completed");
getJBPMEngine().setTaskProperties(this, taskProperties);
// perform transition
super.end(transition); super.end(transition);
if (getTask().getStartState() != null)
{
// if ending a start task, push start task properties to process context, if not
// already done
getJBPMEngine().setDefaultWorkflowProperties(this);
// set task description
getJBPMEngine().setDefaultStartTaskDescription(this);
}
} }
} }

View File

@ -57,7 +57,7 @@
<event type="node-enter"> <event type="node-enter">
<script> <script>
System.out.println("javascript: " + alfrescoScriptResult); System.out.println("javascript: " + alfrescoScriptResult);
System.out.println("bpm_workflowDescription: " + bpm_description); System.out.println("bpm_workflowDescription: " + bpm_workflowDescription);
</script> </script>
</event> </event>
</end-state> </end-state>

View File

@ -37,6 +37,7 @@ import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.AuthorityService; import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.OwnableService; import org.alfresco.service.cmr.security.OwnableService;
import org.alfresco.service.cmr.security.PermissionService; import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.version.VersionService; import org.alfresco.service.cmr.version.VersionService;
import org.alfresco.service.cmr.view.ExporterService; import org.alfresco.service.cmr.view.ExporterService;
import org.alfresco.service.cmr.view.ImporterService; import org.alfresco.service.cmr.view.ImporterService;
@ -88,6 +89,7 @@ public interface ServiceRegistry
static final QName WORKFLOW_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "WorkflowService"); static final QName WORKFLOW_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "WorkflowService");
static final QName AUDIT_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "AuditService"); static final QName AUDIT_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "AuditService");
static final QName OWNABLE_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "OwnableService"); static final QName OWNABLE_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "OwnableService");
static final QName PERSON_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "PersonService");
/** /**
* Get the list of services provided by the Repository * Get the list of services provided by the Repository
@ -280,4 +282,11 @@ public interface ServiceRegistry
*/ */
@NotAuditable @NotAuditable
OwnableService getOwnableService(); OwnableService getOwnableService();
/**
* Get the person service (or null if one is not provided)
* @return
*/
@NotAuditable
PersonService getPersonService();
} }

View File

@ -36,6 +36,15 @@ public final class NodeRef implements EntityRef, Serializable
private final StoreRef storeRef; private final StoreRef storeRef;
private final String id; private final String id;
/**
* @see #NodeRef(StoreRef, String)
* @see StoreRef#StoreRef(String, String)
*/
public NodeRef(String protocol, String identifier, String id)
{
this(new StoreRef(protocol, identifier), id);
}
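A usage sketch of the new convenience constructor (store coordinates and GUID are illustrative only):

    // equivalent to: new NodeRef(new StoreRef("workspace", "SpacesStore"), guid)
    NodeRef nodeRef = new NodeRef("workspace", "SpacesStore", "8f2105c4-e18a-4f98-a712-8e4f2f4b0a11");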
/** /**
* Construct a Node Reference from a Store Reference and Node Id * Construct a Node Reference from a Store Reference and Node Id
* *

View File

@ -33,6 +33,9 @@ public class WorkflowInstance
/** Workflow Instance unique id */ /** Workflow Instance unique id */
public String id; public String id;
/** Workflow Instance description */
public String description;
/** Is this Workflow instance still "in-flight" or has it completed? */ /** Is this Workflow instance still "in-flight" or has it completed? */
public boolean active; public boolean active;
@ -45,6 +48,12 @@ public class WorkflowInstance
/** Workflow End Date */ /** Workflow End Date */
public Date endDate; public Date endDate;
/** Workflow Package */
public NodeRef workflowPackage;
/** Workflow Context */
public NodeRef context;
/** Workflow Definition */ /** Workflow Definition */
public WorkflowDefinition definition; public WorkflowDefinition definition;

View File

@ -100,7 +100,7 @@ public interface WorkflowService
* Gets a Workflow Definition by unique Id * Gets a Workflow Definition by unique Id
* *
* @param workflowDefinitionId the workflow definition id * @param workflowDefinitionId the workflow definition id
* @return the deployed workflow definition * @return the deployed workflow definition (or null if not found)
*/ */
@Auditable(parameters = {"workflowDefinitionId"}) @Auditable(parameters = {"workflowDefinitionId"})
public WorkflowDefinition getDefinitionById(String workflowDefinitionId); public WorkflowDefinition getDefinitionById(String workflowDefinitionId);
@ -109,7 +109,7 @@ public interface WorkflowService
* Gets a Workflow Definition by unique name * Gets a Workflow Definition by unique name
* *
* @param workflowName workflow name e.g. jbpm://review * @param workflowName workflow name e.g. jbpm://review
* @return the deployed workflow definition * @return the deployed workflow definition (or null if not found)
*/ */
@Auditable(parameters = {"workflowName"}) @Auditable(parameters = {"workflowName"})
public WorkflowDefinition getDefinitionByName(String workflowName); public WorkflowDefinition getDefinitionByName(String workflowName);
@ -153,7 +153,7 @@ public interface WorkflowService
* Gets a specific workflow instance * Gets a specific workflow instance
* *
* @param workflowId the id of the workflow to retrieve * @param workflowId the id of the workflow to retrieve
* @return the workflow instance * @return the workflow instance (or null if not found)
*/ */
@Auditable(parameters = {"workflowId"}) @Auditable(parameters = {"workflowId"})
public WorkflowInstance getWorkflowById(String workflowId); public WorkflowInstance getWorkflowById(String workflowId);
@ -204,7 +204,7 @@ public interface WorkflowService
* Gets a Task by unique Id * Gets a Task by unique Id
* *
* @param taskId the task id * @param taskId the task id
* @return the task * @return the task (or null, if not found)
*/ */
@Auditable(parameters = {"taskId"}) @Auditable(parameters = {"taskId"})
public WorkflowTask getTaskById(String taskId); public WorkflowTask getTaskById(String taskId);
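Since these methods now document null for unknown ids instead of guaranteeing a result, a minimal caller-side guard (sketch; service lookup assumed):

    WorkflowTask task = workflowService.getTaskById(taskId);
    if (task == null)
    {
        // unknown task id: handle explicitly rather than risk a NullPointerException later
    }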

View File

@ -0,0 +1,48 @@
<ehcache>
<!-- Sets the path to the directory where cache .data files are created.
If the path is a Java System Property it is replaced by
its value in the running VM.
The following properties are translated:
user.home - User's home directory
user.dir - User's current working directory
java.io.tmpdir - Default temp file path
<diskStore path="java.io.tmpdir"/> -->
<!--Default Cache configuration. These will applied to caches programmatically created through
the CacheManager.
The following attributes are required:
maxElementsInMemory - Sets the maximum number of objects that will be created in memory
eternal - Sets whether elements are eternal. If eternal, timeouts are ignored and the
element is never expired.
overflowToDisk - Sets whether elements can overflow to disk when the in-memory cache
has reached the maxInMemory limit.
The following attributes are optional:
timeToIdleSeconds - Sets the time to idle for an element before it expires.
i.e. The maximum amount of time between accesses before an element expires
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can idle for infinity.
The default value is 0.
timeToLiveSeconds - Sets the time to live for an element before it expires.
i.e. The maximum time between creation time and when an element expires.
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can live for infinity.
The default value is 0.
diskPersistent - Whether the disk store persists between restarts of the Virtual Machine.
The default value is false.
diskExpiryThreadIntervalSeconds- The number of seconds between runs of the disk expiry thread. The default value
is 120 seconds.
-->
<defaultCache
maxElementsInMemory="100000"
eternal="true"
overflowToDisk="false"
diskPersistent="false"
/>
</ehcache>

View File

@ -0,0 +1,166 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<!-- jdbc connection properties -->
<property name="hibernate.dialect">org.hibernate.dialect.HSQLDialect</property>
<property name="hibernate.connection.driver_class">org.hsqldb.jdbcDriver</property>
<property name="hibernate.connection.url">jdbc:hsqldb:mem:.;sql.enforce_strict_size=true</property>
<property name="hibernate.connection.username">sa</property>
<property name="hibernate.connection.password"></property>
<!-- other hibernate properties
<property name="hibernate.show_sql">true</property>
<property name="hibernate.format_sql">true</property>
<property name="hibernate.use_sql_comments">true</property>
-->
<!-- ############################################ -->
<!-- # mapping files with external dependencies # -->
<!-- ############################################ -->
<!-- the following mapping file has a dependency on -->
<!-- 'bsh-{version}.jar'.                           -->
<!-- comment it out if you don't have bsh on your   -->
<!-- classpath; you won't be able to use the        -->
<!-- script element in process definition files     -->
<mapping resource="org/jbpm/graph/action/Script.hbm.xml"/>
<!-- the following mapping files have a dependency on -->
<!-- 'jbpm-identity-{version}.jar', the mapping files -->
<!-- of the pluggable jBPM identity component.        -->
<!-- comment out the following 3 lines if you don't   -->
<!-- want to use the default jBPM identity management -->
<!-- component                                        -->
<mapping resource="org/jbpm/identity/User.hbm.xml"/>
<mapping resource="org/jbpm/identity/Group.hbm.xml"/>
<mapping resource="org/jbpm/identity/Membership.hbm.xml"/>
<!-- ###################### -->
<!-- # jbpm mapping files # -->
<!-- ###################### -->
<!-- hql queries and type defs -->
<mapping resource="org/jbpm/db/hibernate.queries.hbm.xml" />
<!-- graph.def mapping files -->
<mapping resource="org/jbpm/graph/def/ProcessDefinition.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Node.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Transition.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Event.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Action.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/SuperState.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/ExceptionHandler.hbm.xml"/>
<mapping resource="org/jbpm/instantiation/Delegation.hbm.xml"/>
<!-- graph.node mapping files -->
<mapping resource="org/jbpm/graph/node/StartState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/EndState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/ProcessState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Decision.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Fork.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Join.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/State.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/TaskNode.hbm.xml"/>
<!-- context.def mapping files -->
<mapping resource="org/jbpm/context/def/ContextDefinition.hbm.xml"/>
<mapping resource="org/jbpm/context/def/VariableAccess.hbm.xml"/>
<!-- taskmgmt.def mapping files -->
<mapping resource="org/jbpm/taskmgmt/def/TaskMgmtDefinition.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/Swimlane.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/Task.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/TaskController.hbm.xml"/>
<!-- module.def mapping files -->
<mapping resource="org/jbpm/module/def/ModuleDefinition.hbm.xml"/>
<!-- bytes mapping files -->
<mapping resource="org/jbpm/bytes/ByteArray.hbm.xml"/>
<!-- file.def mapping files -->
<mapping resource="org/jbpm/file/def/FileDefinition.hbm.xml"/>
<!-- scheduler.def mapping files -->
<mapping resource="org/jbpm/scheduler/def/CreateTimerAction.hbm.xml"/>
<mapping resource="org/jbpm/scheduler/def/CancelTimerAction.hbm.xml"/>
<!-- graph.exe mapping files -->
<mapping resource="org/jbpm/graph/exe/Comment.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/ProcessInstance.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/Token.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/RuntimeAction.hbm.xml"/>
<!-- module.exe mapping files -->
<mapping resource="org/jbpm/module/exe/ModuleInstance.hbm.xml"/>
<!-- context.exe mapping files -->
<mapping resource="org/jbpm/context/exe/ContextInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/TokenVariableMap.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/VariableInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/ByteArrayInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/DateInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/DoubleInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/HibernateLongInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/HibernateStringInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/LongInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/NullInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/StringInstance.hbm.xml"/>
<!-- msg.db mapping files -->
<mapping resource="org/jbpm/msg/Message.hbm.xml"/>
<mapping resource="org/jbpm/msg/db/TextMessage.hbm.xml"/>
<mapping resource="org/jbpm/command/ExecuteActionCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/ExecuteNodeCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/SignalCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/TaskInstanceEndCommand.hbm.xml"/>
<!-- taskmgmt.exe mapping files -->
<mapping resource="org/jbpm/taskmgmt/exe/TaskMgmtInstance.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/TaskInstance.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/PooledActor.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/SwimlaneInstance.hbm.xml"/>
<!-- scheduler.exe mapping files -->
<mapping resource="org/jbpm/scheduler/exe/Timer.hbm.xml"/>
<!-- logging mapping files -->
<mapping resource="org/jbpm/logging/log/ProcessLog.hbm.xml"/>
<mapping resource="org/jbpm/logging/log/MessageLog.hbm.xml"/>
<mapping resource="org/jbpm/logging/log/CompositeLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ActionLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/NodeLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessInstanceCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessInstanceEndLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessStateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/SignalLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TokenCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TokenEndLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TransitionLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableDeleteLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/ByteArrayUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/DateUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/DoubleUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/HibernateLongUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/HibernateStringUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/LongUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/StringUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskAssignLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskEndLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneAssignLog.hbm.xml"/>
</session-factory>
</hibernate-configuration>
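
A minimal sketch of standing this configuration up outside the Spring wiring, assuming Hibernate 3 with the file on the classpath under the default name hibernate.cfg.xml (adjust configure(...) if it is deployed elsewhere); the class name is ours:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class JbpmSessionFactoryExample
{
    public static void main(String[] args)
    {
        // reads the session-factory element above, including every jBPM mapping resource
        Configuration cfg = new Configuration().configure();
        SessionFactory sessionFactory = cfg.buildSessionFactory();
        System.out.println("Mapped classes: " + sessionFactory.getAllClassMetadata().size());
        sessionFactory.close();
    }
}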