Merged V1.4 to HEAD

svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@3925 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@3965 .


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@3966 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley 2006-09-29 07:45:33 +00:00
parent d4947ef511
commit cf29ca2343
51 changed files with 2076 additions and 1985 deletions

View File

@@ -285,6 +285,9 @@
<property name="serviceRegistry">
<ref bean="ServiceRegistry"></ref>
</property>
<property name="headerEncoding">
<value>${mail.header}</value>
</property>
</bean>
<bean id="set-property-value" class="org.alfresco.repo.action.executer.SetPropertyValueActionExecuter" parent="action-executer">

View File

@@ -2,24 +2,12 @@
xmlns:cm="http://www.alfresco.org/model/content/1.0"
xmlns:app="http://www.alfresco.org/model/application/1.0">
<!--
<cm:folder>
<app:uifacets />
<cm:name>${tutorial.space.name}</cm:name>
<cm:description>${tutorial.space.description}</cm:description>
<app:icon>space-icon-doc</app:icon>
<cm:contains>
-->
<cm:content>
<app:uifacets />
<cm:name>${tutorial.document.name}</cm:name>
<cm:title>${tutorial.document.title}</cm:title>
<cm:description>${tutorial.document.description}</cm:description>
<cm:content>contentUrl=classpath:alfresco/bootstrap/${tutorial.document.name}|mimetype=application/pdf|size=|encoding=</cm:content>
</cm:content>
<!--
</cm:contains>
</cm:folder>
-->
<cm:content>
<app:uifacets />
<cm:name>${tutorial.document.name}</cm:name>
<cm:title>${tutorial.document.title}</cm:title>
<cm:description>${tutorial.document.description}</cm:description>
<cm:content>contentUrl=classpath:alfresco/bootstrap/Alfresco-Tutorial.pdf|mimetype=application/pdf|size=|encoding=</cm:content>
</cm:content>
</view:view>

View File

@@ -0,0 +1,38 @@
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>
<beans>
<!-- Forces the reindexing of nodes where content may have been missing before -->
<!--
This component can be triggered at intervals where asynchronous content sharing
between clustered servers has been set up.
-->
<bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="missingContentReindexComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<!-- Give the server 5 minutes and then check for missing content -->
<property name="startDelayMinutes">
<value>5</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
</bean>
</beans>

View File

@@ -4,10 +4,10 @@
<beans>
<bean id="indexRecoveryComponentBase" abstract="true" >
<property name="sessionFactory">
<ref bean="sessionFactory" />
<property name="authenticationComponent">
<ref bean="authenticationComponentImpl" />
</property>
<property name="transactionService">
<property name="transactionComponent">
<ref bean="transactionComponent" />
</property>
<property name="indexer">
@@ -22,62 +22,33 @@
<property name="nodeService">
<ref bean="nodeService" />
</property>
<property name="nodeDaoService">
<ref bean="nodeDaoService" />
</property>
</bean>
<!-- full node index recovery -->
<bean id="indexRecoveryComponent" class="org.alfresco.repo.node.index.FullIndexRecoveryComponent" parent="indexRecoveryComponentBase">
<!-- Properties controlling full index rebuilding / tracking -->
<property name="executeFullRecovery">
<value>false</value> <!-- enable this to start the full index recovery -->
</property>
<property name="runContinuously">
<value>false</value> <!-- ensure the index is up to date and then stop -->
</property>
<property name="waitTime">
<value>1000</value> <!-- milliseconds to wait between checks for new transactions -->
</property>
<property name="l2CacheMode">
<value>NORMAL</value> <!-- normal L2 cache usage (database is changed by this server only) -->
</property>
</bean>
<!-- attempt to reindex content that was missing before -->
<!-- index recovery and validation -->
<!--
<bean id="missingContentReindexComponent" class="org.alfresco.repo.node.index.MissingContentReindexComponent" parent="indexRecoveryComponentBase">
<property name="runContinuously">
<value>false</value>
</property>
<property name="waitTime">
<value>300000</value>
Recovery types are:
NONE: Ignore
VALIDATE: Checks that the last transaction for each store is represented in the indexes
AUTO: Validates and auto-recovers if validation fails
FULL: Full index rebuild, processing all transactions in order. The server is temporarily suspended.
-->
<bean
id="indexRecoveryComponent"
class="org.alfresco.repo.node.index.FullIndexRecoveryComponent"
parent="indexRecoveryComponentBase">
<property name="recoveryMode">
<value>${index.recovery.mode}</value>
</property>
</bean>
<bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean id="IndexRecoveryJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="missingContentReindexComponent">
<ref bean="missingContentReindexComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="startDelay">
<value>60000</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
</bean>
-->
<!-- Bean that attempts to index content that was previously missing -->
<bean
id="missingContentReindexComponent"
class="org.alfresco.repo.node.index.MissingContentReindexComponent"
parent="indexRecoveryComponentBase">
</bean>
</beans>
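The `${index.recovery.mode}` placeholder above is resolved from the repository properties (see the repository.properties diff further down). As an illustration only, the programmatic equivalent of this wiring, using the setter this commit adds to FullIndexRecoveryComponent, would be:

    // Illustrative sketch only: Spring normally performs this wiring from the XML above
    FullIndexRecoveryComponent indexRecoveryComponent = new FullIndexRecoveryComponent();
    indexRecoveryComponent.setRecoveryMode("AUTO");   // one of NONE, VALIDATE, AUTO, FULL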

View File

@@ -10,14 +10,14 @@ bpm_businessprocessmodel.transition.description=Task Done
# Base Task
bpm_businessprocessmodel.type.bpm_task.title=Task
bpm_businessprocessmodel.type.bpm_task.description=Task
bpm_businessprocessmodel.property.bpm_taskId.title=Task Identifier
bpm_businessprocessmodel.property.bpm_taskId.description=Task Identifier
bpm_businessprocessmodel.property.bpm_startDate.title=Task Start Date
bpm_businessprocessmodel.property.bpm_startDate.description=Task Start Date
bpm_businessprocessmodel.property.bpm_completionDate.title=Task Completion Date
bpm_businessprocessmodel.property.bpm_completionDate.description=Task Completion Date
bpm_businessprocessmodel.property.bpm_dueDate.title=Task Due Date
bpm_businessprocessmodel.property.bpm_dueDate.description=Task Due Date
bpm_businessprocessmodel.property.bpm_taskId.title=Identifier
bpm_businessprocessmodel.property.bpm_taskId.description=Identifier
bpm_businessprocessmodel.property.bpm_startDate.title=Start Date
bpm_businessprocessmodel.property.bpm_startDate.description=Start Date
bpm_businessprocessmodel.property.bpm_completionDate.title=Completion Date
bpm_businessprocessmodel.property.bpm_completionDate.description=Completion Date
bpm_businessprocessmodel.property.bpm_dueDate.title=Due Date
bpm_businessprocessmodel.property.bpm_dueDate.description=Due Date
bpm_businessprocessmodel.property.bpm_status.title=Status
bpm_businessprocessmodel.property.bpm_status.description=Status
bpm_businessprocessmodel.property.bpm_priority.title=Priority
@@ -36,9 +36,9 @@ bpm_businessprocessmodel.property.bpm_workflowInstanceId.title=Workflow Instance
bpm_businessprocessmodel.property.bpm_workflowInstanceId.description=Workflow Instance Id
bpm_businessprocessmodel.property.bpm_context.title=Task Context
bpm_businessprocessmodel.property.bpm_context.description=The context within which this task has been assigned
bpm_businessprocessmodel.property.bpm_description.title=Task Description
bpm_businessprocessmodel.property.bpm_description.title=Description
bpm_businessprocessmodel.property.bpm_description.description=Description of what needs to be achieved
bpm_businessprocessmodel.property.bpm_outcome.title=Task Outcome
bpm_businessprocessmodel.property.bpm_outcome.title=Outcome
bpm_businessprocessmodel.property.bpm_outcome.description=Decision made on completing Task
bpm_businessprocessmodel.property.bpm_completedItems.title=Completed Items
bpm_businessprocessmodel.property.bpm_completedItems.description=Package items marked as complete

View File

@@ -1,4 +1,10 @@
# Content-related messages
content.content_missing=The node''s content is missing: \n node: {0} \n reader: {1} \n Please contact your system administrator.
content.runtime_exec.property_moved=The property ''errorCodes'' has moved down onto the RuntimeExec class
index.recovery.store_not_up_to_date=The indexes for store ''{0}'' are not synchronized with the database.
index.recovery.starting=Index recovery started: {0} transactions.
index.recovery.complete=Index recovery completed.
index.recovery.progress=\t{0} % complete.
index.recovery.terminated=Index recovery terminated.

View File

@@ -291,6 +291,11 @@
<aspect name="bpm:workflowPackage">
<properties>
<!-- Created by Workflow Service (true), or provided from outside (false) -->
<property name="bpm:isSystemPackage">
<type>d:boolean</type>
</property>
<!-- -->
<!-- Associated Workflow -->
<!-- -->

View File

@@ -1018,7 +1018,7 @@
<idref local="WorkflowService_transaction"/>
<idref local="AuditMethodInterceptor"/>
<idref local="exceptionTranslator"/>
<idref bean="WorkflowService_security"/>
<idref local="WorkflowService_descriptor"/>
</list>
</property>

View File

@@ -8,13 +8,14 @@ dir.contentstore.deleted=${dir.root}/contentstore.deleted
dir.auditcontentstore=${dir.root}/audit.contentstore
# The location for lucene index files
dir.indexes=${dir.root}/lucene-indexes
# The location for lucene index locks
dir.indexes.lock=${dir.indexes}/locks
# The index recovery mode (NONE, VALIDATE, AUTO, FULL)
index.recovery.mode=VALIDATE
# #################### #
# Lucene configuration #
# #################### #
@@ -74,6 +75,8 @@ mail.username=anonymous
mail.password=
# Set this value to UTF-8 or similar for encoding of email messages as required
mail.encoding=UTF-8
# Set this value to 7bit or similar for Asian encoding of email headers as required
mail.header=
# System Configuration

View File

@@ -2,9 +2,25 @@
<!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>
<beans>
<!-- -->
<!-- Scheduled jobs -->
<!-- -->
<!-- Task scheduler -->
<!-- Triggers should not appear here - the scheduler should be injected into the trigger definition -->
<!-- This bean should not need to appear elsewhere in extension configuration -->
<bean id="schedulerFactory" class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="waitForJobsToCompleteOnShutdown">
<value>true</value>
</property>
<property name="configLocation">
<value>classpath:alfresco/domain/quartz.properties</value>
</property>
<property name="schedulerName">
<value>DefaultScheduler</value>
</property>
</bean>
<!-- -->
<!-- Scheduled tasks -->
<!-- -->
<bean id="ftsIndexerTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
@@ -21,16 +37,74 @@
</property>
</bean>
</property>
<!-- Try and do some indexing every minute after starting up -->
<property name="startDelay">
<value>60000</value>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<property name="repeatInterval">
<value>60000</value>
<!-- Try and do some indexing every minute after starting up -->
<property name="startDelayMinutes">
<value>1</value>
</property>
<property name="repeatIntervalMinutes">
<value>1</value>
</property>
</bean>
<!-- Forces the reindexing of nodes where content may have been missing before -->
<!--
This component can be triggered at intervals where asynchronous content sharing
between clustered servers has been set up
-->
<bean id="missingContentReindexTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="missingContentReindexComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<!-- Give the server 5 minutes and then check for missing content -->
<property name="startDelayMinutes">
<value>5</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
</bean>
<bean id="indexRecoveryTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="indexRecoveryComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<property name="startDelayMinutes">
<value>1</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
</bean>
<bean id="tempFileCleanerTrigger" class="org.alfresco.util.TriggerBean">
@@ -48,15 +122,16 @@
</property>
</bean>
</property>
<property name="startDelay">
<value>1800000</value><!-- start after half an hour -->
</property>
<property name="repeatInterval">
<value>3600000</value><!-- repeat every hour -->
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<!-- start after half an hour and repeat hourly -->
<property name="startDelayMinutes">
<value>30</value>
</property>
<property name="repeatIntervalMinutes">
<value>60</value>
</property>
</bean>
<bean id="fileContentStoreCleanerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
@@ -84,33 +159,6 @@
</property>
</bean>
<bean id="indexRecoveryTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<bean id="IndexRecoveryJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="indexRecoveryComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="startDelay">
<value>60000</value> <!-- start after 1 minute -->
</property>
<property name="repeatCount">
<value>0</value> <!-- DO NOT REPEAT !!!!! -->
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
</bean>
<bean id="indexBackupJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2$LuceneIndexBackupJob</value>
@@ -150,26 +198,12 @@
<ref bean="schedulerFactory" />
</property>
-->
<property name="startDelay">
<value>3600000</value><!-- start after an hour -->
<!-- start after an hour and repeat hourly -->
<property name="startDelayMinutes">
<value>60</value>
</property>
<property name="repeatInterval">
<value>3600000</value><!-- repeat every hour -->
</property>
</bean>
<!-- Scheduled tasks -->
<!-- Triggers should not appear here - the scheduler should be injected into the trigger definition -->
<!-- This bean should not need to appear elsewhere in extension configuration -->
<bean id="schedulerFactory" class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="waitForJobsToCompleteOnShutdown">
<value>true</value>
</property>
<property name="configLocation">
<value>classpath:alfresco/domain/quartz.properties</value>
</property>
<property name="schedulerName">
<value>DefaultScheduler</value>
<property name="repeatIntervalMinutes">
<value>60</value>
</property>
</bean>

View File

@@ -5,17 +5,7 @@
<swimlane name="initiator"/>
<start-state name="start">
<task name="wf:submitAdhocTask" swimlane="initiator">
<controller>
<variable name="bpm_assignee" access="write"/>
<variable name="bpm_workflowDescription" access="write"/>
<variable name="bpm_workflowDueDate" access="write"/>
<variable name="bpm_workflowPriority" access="write"/>
<variable name="bpm_package" access="write"/>
<variable name="bpm_context" access="write"/>
<variable name="wf_notifyMe" access="write"/>
</controller>
</task>
<task name="wf:submitAdhocTask" swimlane="initiator"/>
<transition name="" to="adhoc"/>
</start-state>

View File

@@ -2,55 +2,46 @@
<process-definition xmlns="urn:jbpm.org:jpdl-3.1" name="wf:review">
<swimlane name="initiator"/>
<swimlane name="initiator" />
<start-state name="start">
<task name="wf:submitReviewTask" swimlane="initiator">
<controller>
<variable name="bpm_assignee" access="write"/>
<variable name="bpm_workflowDescription" access="write"/>
<variable name="bpm_workflowDueDate" access="write"/>
<variable name="bpm_workflowPriority" access="write"/>
<variable name="bpm_package" access="write"/>
<variable name="bpm_context" access="write"/>
</controller>
</task>
<transition name="" to="review"/>
</start-state>
<start-state name="start">
<task name="wf:submitReviewTask" swimlane="initiator" />
<transition name="" to="review" />
</start-state>
<swimlane name="reviewer">
<assignment actor-id="#{bpm_assignee.properties['cm:userName']}" />
</swimlane>
<task-node name="review">
<task name="wf:reviewTask" swimlane="reviewer">
<event type="task-create">
<script>
if (bpm_workflowDueDate != void)
{
taskInstance.dueDate = bpm_workflowDueDate;
}
if (bpm_workflowPriority != void)
{
taskInstance.priority = bpm_workflowPriority;
}
</script>
</event>
</task>
<transition name="reject" to="rejected" />
<transition name="approve" to="approved" />
</task-node>
<task-node name="rejected">
<task name="wf:rejectedTask" swimlane="initiator" />
<transition name="" to="end" />
</task-node>
<task-node name="approved">
<task name="wf:approvedTask" swimlane="initiator" />
<transition name="" to="end" />
</task-node>
<end-state name="end" />
<swimlane name="reviewer">
<assignment actor-id="#{bpm_assignee.properties['cm:userName']}"/>
</swimlane>
<task-node name="review">
<task name="wf:reviewTask" swimlane="reviewer">
<event type="task-create">
<script>
if (bpm_workflowDueDate != void)
{
taskInstance.dueDate = bpm_workflowDueDate;
}
if (bpm_workflowPriority != void)
{
taskInstance.priority = bpm_workflowPriority;
}
</script>
</event>
</task>
<transition name="reject" to="rejected"/>
<transition name="approve" to="approved"/>
</task-node>
<task-node name="rejected">
<task name="wf:rejectedTask" swimlane="initiator"/>
<transition name="" to="end"/>
</task-node>
<task-node name="approved">
<task name="wf:approvedTask" swimlane="initiator"/>
<transition name="" to="end"/>
</task-node>
<end-state name="end"/>
</process-definition>

View File

@@ -201,7 +201,6 @@ public class RepositoryImpl implements Repository
{
// construct the session
SessionImpl sessionImpl = new SessionImpl(this);
registerSession(sessionImpl);
// authenticate user
AuthenticationService authenticationService = getServiceRegistry().getAuthenticationService();
@@ -211,7 +210,6 @@
}
catch(AuthenticationException e)
{
deregisterSession();
throw new LoginException("Alfresco Repository failed to authenticate credentials", e);
}
@@ -222,11 +220,11 @@
// session is now ready
Session session = sessionImpl.getProxy();
registerSession(sessionImpl);
return session;
}
catch(AlfrescoRuntimeException e)
{
deregisterSession();
throw new RepositoryException(e);
}
}

View File

@@ -109,6 +109,11 @@ public class MailActionExecuter extends ActionExecuterAbstractBase
*/
private ServiceRegistry serviceRegistry;
/**
* Mail header encoding scheme
*/
private String headerEncoding = null;
/**
* @param javaMailSender the java mail sender
*/
@@ -164,6 +169,14 @@
{
this.nodeService = nodeService;
}
/**
* @param headerEncoding The mail header encoding to set.
*/
public void setHeaderEncoding(String headerEncoding)
{
this.headerEncoding = headerEncoding;
}
/**
* Execute the rule action
@@ -180,6 +193,12 @@
{
MimeMessageHelper message = new MimeMessageHelper(mimeMessage);
// set header encoding if one has been supplied
if (headerEncoding != null && headerEncoding.length() != 0)
{
mimeMessage.setHeader("Content-Transfer-Encoding", headerEncoding);
}
// set recipient
String to = (String)ruleAction.getParameterValue(PARAM_TO);
if (to != null && to.length() != 0)
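For context, a minimal standalone sketch of the header guard this diff introduces, using plain JavaMail; the "7bit" value here is hypothetical and stands in for whatever `${mail.header}` is set to:

    import java.util.Properties;
    import javax.mail.Session;
    import javax.mail.internet.MimeMessage;

    public class HeaderEncodingSketch
    {
        public static void main(String[] args) throws Exception
        {
            MimeMessage mimeMessage = new MimeMessage(Session.getInstance(new Properties()));
            String headerEncoding = "7bit";   // hypothetical; supplied via mail.header in practice
            // mirror of the guard in MailActionExecuter: only set the header when configured
            if (headerEncoding != null && headerEncoding.length() != 0)
            {
                mimeMessage.setHeader("Content-Transfer-Encoding", headerEncoding);
            }
            System.out.println(mimeMessage.getHeader("Content-Transfer-Encoding", null));
        }
    }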

View File

@@ -44,6 +44,7 @@ import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.hibernate.CacheMode;
import org.hibernate.exception.ConstraintViolationException;
import org.hibernate.exception.GenericJDBCException;
/**
* Test persistence and retrieval of Hibernate-specific implementations of the
@@ -161,6 +162,11 @@ public class HibernateNodeTest extends BaseSpringTest
{
// expected
}
catch(GenericJDBCException e)
{
// Sybase
// expected
}
}
/**
@@ -251,8 +257,8 @@
assoc1.setQname(QName.createQName(null, "number1"));
assoc1.setChildNodeName("number1");
assoc1.setChildNodeNameCrc(1);
getSession().save(assoc1);
assoc1.buildAssociation(containerNode, contentNode);
getSession().save(assoc1);
// make another association between the same two parent and child nodes
ChildAssoc assoc2 = new ChildAssocImpl();
@ -261,8 +267,8 @@ public class HibernateNodeTest extends BaseSpringTest
assoc2.setQname(QName.createQName(null, "number2"));
assoc2.setChildNodeName("number2");
assoc2.setChildNodeNameCrc(2);
getSession().save(assoc2);
assoc2.buildAssociation(containerNode, contentNode);
getSession().save(assoc2);
assertFalse("Hashcode incorrent", assoc2.hashCode() == 0);
assertNotSame("Assoc equals failure", assoc1, assoc2);

View File

@@ -314,56 +314,6 @@
assoc.target.id = :targetId
</query>
<query name="node.GetNextChangeTxnIds">
select distinct
transaction.changeTxnId
from
org.alfresco.repo.domain.hibernate.TransactionImpl as transaction
where
transaction.changeTxnId > :currentTxnId
order by
transaction.changeTxnId
</query>
<query name="node.GetChangedNodeStatusesCount">
select
count(transaction.changeTxnId)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is not null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetChangedNodeStatuses">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is not null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetDeletedNodeStatuses">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as transaction
where
status.key.protocol = :storeProtocol and
status.key.identifier = :storeIdentifier and
status.node.id is null and
transaction.changeTxnId = :changeTxnId
</query>
<query name="node.GetNodesWithPropertyValuesByActualType">
select
node

View File

@@ -59,4 +59,81 @@
server.ipAddress = :ipAddress
</query>
<query name="txn.GetLastTxnIdForStore">
select
max(txn.id)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.CountTransactions">
select
count(txn.id)
from
org.alfresco.repo.domain.hibernate.TransactionImpl as txn
</query>
<query name="txn.GetNextTxns">
<![CDATA[
select
txn
from
org.alfresco.repo.domain.hibernate.TransactionImpl as txn
where
txn.id > :lastTxnId
order by
txn.id
]]>
</query>
<query name="txn.GetTxnUpdateCountForStore">
select
count(status.key.guid)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
txn.id = :txnId and
status.node is not null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnDeleteCountForStore">
select
count(status.key.guid)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
where
txn.id = :txnId and
status.node is null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnChangesForStore">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
where
status.transaction.id = :txnId and
status.key.protocol = :protocol and
status.key.identifier = :identifier
</query>
<query name="txn.GetTxnChanges">
select
status
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
where
status.transaction.id = :txnId
</query>
</hibernate-mapping>

View File

@@ -202,7 +202,7 @@ public final class Actions implements Scopeable
@SuppressWarnings("synthetic-access")
public void execute(Node node)
{
if (this.parameters.isModified())
if (this.parameters != null && this.parameters.isModified())
{
Map<String, Serializable> actionParams = action.getParameterValues();
actionParams.clear();

View File

@@ -25,10 +25,12 @@ import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.NodeAssoc;
import org.alfresco.repo.domain.NodeStatus;
import org.alfresco.repo.domain.Store;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.QName;
/**
@@ -233,4 +235,12 @@ public interface NodeDaoService
* @return Returns the values for the given type definition
*/
public List<Serializable> getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition);
/** Transaction-level queries used by the index tracking and recovery components in this commit */
public Transaction getLastTxn(final StoreRef storeRef);
public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId);
public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId);
public int getTransactionCount();
public List<Transaction> getNextTxns(final Transaction lastTxn, final int count);
public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId);
public List<NodeRef> getTxnChanges(final long txnId);
}

View File

@@ -990,4 +990,176 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
return convertedValues;
}
/*
* Queries for transactions
*/
private static final String QUERY_GET_LAST_TXN_ID_FOR_STORE = "txn.GetLastTxnIdForStore";
private static final String QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE = "txn.GetTxnUpdateCountForStore";
private static final String QUERY_GET_TXN_DELETE_COUNT_FOR_STORE = "txn.GetTxnDeleteCountForStore";
private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions";
private static final String QUERY_GET_NEXT_TXNS = "txn.GetNextTxns";
private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore";
private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges";
@SuppressWarnings("unchecked")
public Transaction getLastTxn(final StoreRef storeRef)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_LAST_TXN_ID_FOR_STORE);
query.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Long txnId = (Long) getHibernateTemplate().execute(callback);
Transaction txn = null;
if (txnId != null)
{
txn = (Transaction) getSession().get(TransactionImpl.class, txnId);
}
// done
return txn;
}
@SuppressWarnings("unchecked")
public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count;
}
@SuppressWarnings("unchecked")
public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_DELETE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count;
}
@SuppressWarnings("unchecked")
public int getTransactionCount()
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_COUNT_TRANSACTIONS);
query.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Integer count = (Integer) getHibernateTemplate().execute(callback);
// done
return count.intValue();
}
@SuppressWarnings("unchecked")
public List<Transaction> getNextTxns(final Transaction lastTxn, final int count)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
long lastTxnId = (lastTxn == null) ? -1L : lastTxn.getId();
Query query = session.getNamedQuery(QUERY_GET_NEXT_TXNS);
query.setLong("lastTxnId", lastTxnId)
.setMaxResults(count)
.setReadOnly(true);
return query.list();
}
};
List<Transaction> results = (List<Transaction>) getHibernateTemplate().execute(callback);
// done
return results;
}
@SuppressWarnings("unchecked")
public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setReadOnly(true);
return query.list();
}
};
List<NodeStatus> results = (List<NodeStatus>) getHibernateTemplate().execute(callback);
// transform into a simpler form
List<NodeRef> nodeRefs = new ArrayList<NodeRef>(results.size());
for (NodeStatus nodeStatus : results)
{
NodeRef nodeRef = new NodeRef(storeRef, nodeStatus.getKey().getGuid());
nodeRefs.add(nodeRef);
}
// done
return nodeRefs;
}
@SuppressWarnings("unchecked")
public List<NodeRef> getTxnChanges(final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES);
query.setLong("txnId", txnId)
.setReadOnly(true);
return query.list();
}
};
List<NodeStatus> results = (List<NodeStatus>) getHibernateTemplate().execute(callback);
// transform into a simpler form
List<NodeRef> nodeRefs = new ArrayList<NodeRef>(results.size());
for (NodeStatus nodeStatus : results)
{
NodeRef nodeRef = new NodeRef(
nodeStatus.getKey().getProtocol(),
nodeStatus.getKey().getIdentifier(),
nodeStatus.getKey().getGuid());
nodeRefs.add(nodeRef);
}
// done
return nodeRefs;
}
}
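As a sketch only (not part of the commit), the intended consumption pattern of these transaction queries is to page through transactions in order and reindex the nodes each one touched; FullIndexRecoveryComponent below follows the same pattern:

    import java.util.List;
    import org.alfresco.repo.domain.Transaction;
    import org.alfresco.repo.node.db.NodeDaoService;
    import org.alfresco.service.cmr.repository.NodeRef;

    public class TxnWalkSketch
    {
        // Hypothetical helper illustrating the paging contract of the DAO methods above
        public void walkTransactions(NodeDaoService nodeDaoService)
        {
            Transaction lastTxn = null;
            while (true)
            {
                List<Transaction> txns = nodeDaoService.getNextTxns(lastTxn, 1000);
                if (txns.size() == 0)
                {
                    break;   // caught up with the database
                }
                for (Transaction txn : txns)
                {
                    // nodes created, updated or deleted in this transaction
                    List<NodeRef> changes = nodeDaoService.getTxnChanges(txn.getId());
                    // ... feed 'changes' to the indexer here ...
                }
                lastTxn = txns.get(txns.size() - 1);
            }
        }
    }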

View File

@@ -0,0 +1,227 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import net.sf.acegisecurity.Authentication;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Abstract helper for reindexing.
*
* @see #reindexImpl()
* @see #getIndexerWriteLock()
* @see #isShuttingDown()
*
* @author Derek Hulley
*/
public abstract class AbstractReindexComponent implements IndexRecovery
{
private static Log logger = LogFactory.getLog(AbstractReindexComponent.class);
/** kept to notify the thread that it should quit */
private static VmShutdownListener vmShutdownListener = new VmShutdownListener("MissingContentReindexComponent");
private AuthenticationComponent authenticationComponent;
/** provides transactions to atomically index each missed transaction */
protected TransactionComponent transactionService;
/** the component to index the node hierarchy */
protected Indexer indexer;
/** the FTS indexer that we will prompt to pick up on any un-indexed text */
protected FullTextSearchIndexer ftsIndexer;
/** the component providing searches of the indexed nodes */
protected SearchService searcher;
/** the component giving direct access to <b>store</b> instances */
protected NodeService nodeService;
/** the component giving direct access to <b>transaction</b> instances */
protected NodeDaoService nodeDaoService;
private boolean shutdown;
private final WriteLock indexerWriteLock;
public AbstractReindexComponent()
{
shutdown = false;
ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
indexerWriteLock = readWriteLock.writeLock();
}
/**
* Convenience method to get a common write lock. This can be used to avoid
* concurrent access to the work methods.
*/
protected WriteLock getIndexerWriteLock()
{
return indexerWriteLock;
}
/**
* Programmatically notify a reindex thread to terminate
*
* @param shutdown true to shutdown, false to reset
*/
public void setShutdown(boolean shutdown)
{
this.shutdown = shutdown;
}
/**
*
* @return Returns true if the VM shutdown hook has been triggered, or the instance
* was programmatically {@link #shutdown shut down}
*/
protected boolean isShuttingDown()
{
return shutdown || vmShutdownListener.isVmShuttingDown();
}
/**
* @param authenticationComponent ensures that reindexing operates as system user
*/
public void setAuthenticationComponent(AuthenticationComponent authenticationComponent)
{
this.authenticationComponent = authenticationComponent;
}
/**
* Set the low-level transaction component to use
*
* @param transactionComponent provide transactions to index each missed transaction
*/
public void setTransactionComponent(TransactionComponent transactionComponent)
{
this.transactionService = transactionComponent;
}
/**
* @param indexer the indexer that will be used to index
*/
public void setIndexer(Indexer indexer)
{
this.indexer = indexer;
}
/**
* @param ftsIndexer the FTS background indexer
*/
public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
{
this.ftsIndexer = ftsIndexer;
}
/**
* @param searcher component providing index searches
*/
public void setSearcher(SearchService searcher)
{
this.searcher = searcher;
}
/**
* @param nodeService provides information about nodes for indexing
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* @param nodeDaoService provides access to transaction-related queries
*/
public void setNodeDaoService(NodeDaoService nodeDaoService)
{
this.nodeDaoService = nodeDaoService;
}
/**
* Perform the actual work. This method will be called as the system user
* and within an existing transaction. This method will only ever be executed
* by a single thread per instance.
*
*/
protected abstract void reindexImpl();
/**
* If this object is currently busy, then it does nothing
*/
public final void reindex()
{
PropertyCheck.mandatory(this, "authenticationComponent", this.authenticationComponent);
PropertyCheck.mandatory(this, "ftsIndexer", this.ftsIndexer);
PropertyCheck.mandatory(this, "indexer", this.indexer);
PropertyCheck.mandatory(this, "searcher", this.searcher);
PropertyCheck.mandatory(this, "nodeService", this.nodeService);
PropertyCheck.mandatory(this, "nodeDaoService", this.nodeDaoService);
PropertyCheck.mandatory(this, "transactionComponent", this.transactionService);
if (indexerWriteLock.tryLock())
{
Authentication auth = null;
try
{
auth = AuthenticationUtil.getCurrentAuthentication();
// authenticate as the system user
authenticationComponent.setSystemUserAsCurrentUser();
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork() throws Exception
{
reindexImpl();
return null;
}
};
TransactionUtil.executeInUserTransaction(transactionService, reindexWork);
}
finally
{
try { indexerWriteLock.unlock(); } catch (Throwable e) {}
if (auth != null)
{
authenticationComponent.setCurrentAuthentication(auth);
}
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Reindex work completed: " + this);
}
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Bypassed reindex work - already busy: " + this);
}
}
}
}
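To make the template contract concrete, a hypothetical minimal subclass (illustration only; the real implementations follow in this commit) need only supply reindexImpl(), which is invoked as the system user inside a transaction:

    public class NoopReindexComponent extends AbstractReindexComponent
    {
        @Override
        protected void reindexImpl()
        {
            if (isShuttingDown())
            {
                return;   // honour VM shutdown or programmatic shutdown
            }
            // a real implementation queries nodeDaoService and drives the indexer here
        }
    }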

View File

@@ -1,134 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Ensures that the FTS indexing picks up on any outstanding documents that
* require indexing.
* <p>
* FTS indexing is a background process. It is therefore possible that
* certain documents don't get indexed when the server shuts down.
*
* @author Derek Hulley
*/
public class FtsIndexRecoveryComponent implements IndexRecovery
{
private static Log logger = LogFactory.getLog(FtsIndexRecoveryComponent.class);
/** provides transactions to atomically index each missed transaction */
private TransactionService transactionService;
/** the FTS indexer that we will prompt to pick up on any un-indexed text */
private FullTextSearchIndexer ftsIndexer;
/** the component giving direct access to <b>node</b> instances */
private NodeService nodeService;
/** the workspaces to reindex */
private List<StoreRef> storeRefs;
public FtsIndexRecoveryComponent()
{
this.storeRefs = new ArrayList<StoreRef>(2);
}
/**
* @param transactionService provide transactions to index each missed transaction
*/
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
/**
* @param ftsIndexer the FTS background indexer
*/
public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
{
this.ftsIndexer = ftsIndexer;
}
/**
* @param nodeService provides information about nodes for indexing
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Set the workspaces that need reindexing
*
* @param storeRefStrings a list of strings representing store references
*/
public void setStores(List<String> storeRefStrings)
{
storeRefs.clear();
for (String storeRefStr : storeRefStrings)
{
StoreRef storeRef = new StoreRef(storeRefStr);
storeRefs.add(storeRef);
}
}
/**
* Ensures that the FTS indexing is activated for any outstanding full text searches.
*/
public void reindex()
{
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
// reindex each store
for (StoreRef storeRef : storeRefs)
{
// check if the store exists
if (!nodeService.exists(storeRef))
{
// store does not exist
if (logger.isDebugEnabled())
{
logger.debug("Skipping reindex of non-existent store: " + storeRef);
}
continue;
}
// prompt FTS to reindex the store
ftsIndexer.requiresIndex(storeRef);
}
// done
return null;
}
};
TransactionUtil.executeInUserTransaction(transactionService, reindexWork);
// done
if (logger.isDebugEnabled())
{
logger.debug("Prompted FTS index on stores: " + storeRefs);
}
}
}

View File

@@ -1,60 +0,0 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
* Checks that the FTS index recovery component is working
*
* @author Derek Hulley
*/
public class FtsIndexRecoveryComponentTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private IndexRecovery indexRecoverer;
private TransactionService txnService;
public void setUp() throws Exception
{
indexRecoverer = (IndexRecovery) ctx.getBean("indexRecoveryComponent");
txnService = (TransactionService) ctx.getBean("transactionComponent");
}
public void testReindexing() throws Exception
{
// performs a reindex
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
indexRecoverer.reindex();
return null;
}
};
// reindex
TransactionUtil.executeInNonPropagatingUserTransaction(txnService, reindexWork);
}
}

View File

@@ -16,660 +16,365 @@
*/
package org.alfresco.repo.node.index;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.NodeStatus;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.CacheMode;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* Ensures that the FTS indexing picks up on any outstanding documents that
* require indexing.
* <p>
* This component must be used as a singleton (one per VM) and may only be
* called to reindex once. It will start a thread that processes all available
* transactions and keeps checking to ensure that the index is up to date with
* the latest database changes.
* <p>
* <b>The following points are important:</b>
* <ul>
* <li>
* By default, the Hibernate L2 cache is used during processing.
* This can be disabled by either disabling the L2 cache globally
* for the server (not recommended) or by setting the
* {@link #setL2CacheMode(String) l2CacheMode} property. If the
* database is static then the L2 cache usage can be set to use
* the <code>NORMAL</code> mode. <code>REFRESH</code> should be
* used where the server will still be accessed from some clients
* despite the database changing. <code>NORMAL</code> can be used
* in the case of the caches being clustered, i.e. the caches will
* not be out of date w.r.t. the database.
* </li>
* <li>
* This process should only be used continuously where the index
* transactions are following the database transactions. Use the
* {@link #setRunContinuously(boolean) runContinuously} property
* to change this behaviour.
* </li>
* </ul>
* Component to check and recover the indexes.
*
* @author Derek Hulley
*/
public class FullIndexRecoveryComponent extends HibernateDaoSupport implements IndexRecovery
public class FullIndexRecoveryComponent extends AbstractReindexComponent
{
public static final String QUERY_GET_NEXT_CHANGE_TXN_IDS = "node.GetNextChangeTxnIds";
public static final String QUERY_GET_CHANGED_NODE_STATUSES = "node.GetChangedNodeStatuses";
public static final String QUERY_GET_DELETED_NODE_STATUSES = "node.GetDeletedNodeStatuses";
public static final String QUERY_GET_CHANGED_NODE_STATUSES_COUNT = "node.GetChangedNodeStatusesCount";
private static final String START_TXN_ID = "000";
private static final String ERR_STORE_NOT_UP_TO_DATE = "index.recovery.store_not_up_to_date";
private static final String MSG_RECOVERY_STARTING = "index.recovery.starting";
private static final String MSG_RECOVERY_COMPLETE = "index.recovery.complete";
private static final String MSG_RECOVERY_PROGRESS = "index.recovery.progress";
private static final String MSG_RECOVERY_TERMINATED = "index.recovery.terminated";
private static Log logger = LogFactory.getLog(FullIndexRecoveryComponent.class);
/** ensures that this process is kicked off once per VM */
private static boolean started = false;
/** The current transaction ID being processed */
private static String currentTxnId = START_TXN_ID;
/** kept to notify the thread that it should quit */
private boolean killThread = false;
/** provides transactions to atomically index each missed transaction */
private TransactionService transactionService;
/** the component to index the node hierarchy */
private Indexer indexer;
/** the FTS indexer that we will prompt to pick up on any un-indexed text */
private FullTextSearchIndexer ftsIndexer;
/** the component providing searches of the indexed nodes */
private SearchService searcher;
/** the component giving direct access to <b>node</b> instances */
private NodeService nodeService;
/** set this to run the index recovery component */
private boolean executeFullRecovery;
/** set this on to keep checking for new transactions and never stop */
private boolean runContinuously;
/** set the time to wait between checking indexes */
private long waitTime;
/** controls how the L2 cache is used */
private CacheMode l2CacheMode;
/**
* @return Returns the ID of the current (or last) transaction processed
*/
public static String getCurrentTransactionId()
public static enum RecoveryMode
{
return currentTxnId;
/** Do nothing - not even a check */
NONE,
/** Perform a quick check on the state of the indexes only */
VALIDATE,
/** Performs a quick validation and then starts a full pass-through on failure */
AUTO,
/** Performs a full pass-through of all recorded transactions to ensure that the indexes are up to date */
FULL;
}
private RecoveryMode recoveryMode;
public FullIndexRecoveryComponent()
{
this.killThread = false;
this.executeFullRecovery = false;
this.runContinuously = false;
this.waitTime = 1000L;
this.l2CacheMode = CacheMode.REFRESH;
// ensure that we kill the thread when the VM is shutting down
Runnable shutdownRunnable = new Runnable()
{
public void run()
{
killThread = true;
};
};
Thread shutdownThread = new Thread(shutdownRunnable);
Runtime.getRuntime().addShutdownHook(shutdownThread);
recoveryMode = RecoveryMode.VALIDATE;
}
/**
* @return Returns true if the component has already been started
* Set the type of recovery to perform.
*
* @param recoveryMode one of the {@link RecoveryMode } values
*/
public static boolean isStarted()
public void setRecoveryMode(String recoveryMode)
{
return started;
}
/**
* @param transactionService provide transactions to index each missed transaction
*/
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
/**
* @param indexer the indexer that will be used to index
*/
public void setIndexer(Indexer indexer)
{
this.indexer = indexer;
this.recoveryMode = RecoveryMode.valueOf(recoveryMode);
}
/**
* @param ftsIndexer the FTS background indexer
*/
public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
@Override
protected void reindexImpl()
{
this.ftsIndexer = ftsIndexer;
}
/**
* @param searcher component providing index searches
*/
public void setSearcher(SearchService searcher)
{
this.searcher = searcher;
}
/**
* @param nodeService provides information about nodes for indexing
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
/**
* Set this to <code>true</code> to initiate the full index recovery.
* <p>
* This used to default to <code>true</code> but is now false. Set this
* if the potentially long-running process of checking and fixing the
* indexes must be started.
*
* @param executeFullRecovery
*/
public void setExecuteFullRecovery(boolean executeFullRecovery)
{
this.executeFullRecovery = executeFullRecovery;
}
/**
* Set this to ensure that the process continuously checks for new transactions.
* If not, it will permanently terminate once it catches up with the current
* transactions.
*
* @param runContinuously true to never cease looking for new transactions
*/
public void setRunContinuously(boolean runContinuously)
{
this.runContinuously = runContinuously;
}
/**
* Set the time to wait between checking for new transaction changes in the database.
*
* @param waitTime the time to wait in milliseconds
*/
public void setWaitTime(long waitTime)
{
this.waitTime = waitTime;
}
/**
* Set the hibernate cache mode by name
*
* @see org.hibernate.CacheMode
*/
public void setL2CacheMode(String l2CacheModeStr)
{
if (l2CacheModeStr.equals("GET"))
if (logger.isDebugEnabled())
{
l2CacheMode = CacheMode.GET;
}
else if (l2CacheModeStr.equals("IGNORE"))
{
l2CacheMode = CacheMode.IGNORE;
}
else if (l2CacheModeStr.equals("NORMAL"))
{
l2CacheMode = CacheMode.NORMAL;
}
else if (l2CacheModeStr.equals("PUT"))
{
l2CacheMode = CacheMode.PUT;
}
else if (l2CacheModeStr.equals("REFRESH"))
{
l2CacheMode = CacheMode.REFRESH;
}
else
{
throw new IllegalArgumentException("Unrecognised Hibernate L2 cache mode: " + l2CacheModeStr);
}
}
/**
* Ensure that the index is up to date with the current state of the persistence layer.
* The full list of unique transaction change IDs is retrieved and used to detect
* which are not present in the index. All the node changes and deletions for the
* remaining transactions are then indexed.
*/
public synchronized void reindex()
{
if (FullIndexRecoveryComponent.started)
{
throw new AlfrescoRuntimeException
("Only one FullIndexRecoveryComponent may be used per VM and it may only be called once");
logger.debug("Performing index recovery for type: " + recoveryMode);
}
        // do we just ignore
        if (recoveryMode == RecoveryMode.NONE)
        {
            return;
        }
        // check the level of cover required
        boolean fullRecoveryRequired = false;
        if (recoveryMode == RecoveryMode.FULL)                  // no validation required
        {
            fullRecoveryRequired = true;
        }
        else                                                    // validate first
        {
            List<StoreRef> storeRefs = nodeService.getStores();
            for (StoreRef storeRef : storeRefs)
            {
                // get the last txn ID in the database
                Transaction txn = nodeDaoService.getLastTxn(storeRef);
                boolean lastChangeTxnIdInIndex = isTxnIdPresentInIndex(storeRef, txn);
                if (lastChangeTxnIdInIndex)
                {
                    // this store is good
                    continue;
                }
                // this store isn't up to date
                String msg = I18NUtil.getMessage(ERR_STORE_NOT_UP_TO_DATE, storeRef);
                logger.warn(msg);
                // the store is out of date - validation failed
                if (recoveryMode == RecoveryMode.VALIDATE)
                {
                    // next store
                    continue;
                }
                else if (recoveryMode == RecoveryMode.AUTO)
                {
                    fullRecoveryRequired = true;
                }
            }
        }
        // put the server into read-only mode for the duration
        boolean allowWrite = !transactionService.isReadOnly();
        try
        {
            // set the server into read-only mode
            transactionService.setAllowWrite(false);
            // do we need to perform a full recovery
            if (fullRecoveryRequired)
            {
                performFullRecovery();
            }
        }
        finally
        {
            // restore read-only state
            transactionService.setAllowWrite(allowWrite);
        }
    }

    private static final int MAX_TRANSACTIONS_PER_ITERATION = 1000;

    private void performFullRecovery()
    {
        int txnCount = nodeDaoService.getTransactionCount();
        // starting
        String msgStart = I18NUtil.getMessage(MSG_RECOVERY_STARTING, txnCount);
        logger.info(msgStart);
        // count the transactions
        int processedCount = 0;
        Transaction lastTxn = null;
        while (true)
        {
            List<Transaction> nextTxns = nodeDaoService.getNextTxns(
                    lastTxn,
                    MAX_TRANSACTIONS_PER_ITERATION);
            // reindex each transaction
            for (Transaction txn : nextTxns)
            {
                Long txnId = txn.getId();
                // check if we have to terminate
                if (isShuttingDown())
                {
                    String msgTerminated = I18NUtil.getMessage(MSG_RECOVERY_TERMINATED);
                    logger.warn(msgTerminated);
                    return;
                }
                reindexTransaction(txnId);
                // dump a progress report every 10% of the way
                double before = (double) processedCount / (double) txnCount * 10.0;     // 0 - 10
                processedCount++;
                double after = (double) processedCount / (double) txnCount * 10.0;      // 0 - 10
                if (Math.floor(before) < Math.floor(after))     // crossed a 0 - 10 integer boundary
                {
                    int complete = ((int) Math.floor(after)) * 10;
                    String msgProgress = I18NUtil.getMessage(MSG_RECOVERY_PROGRESS, complete);
                    logger.info(msgProgress);
                }
            }
            // have we finished?
            if (nextTxns.size() == 0)
            {
                // there are no more
                break;
            }
            lastTxn = nextTxns.get(nextTxns.size() - 1);
        }
        // done
        String msgDone = I18NUtil.getMessage(MSG_RECOVERY_COMPLETE);
        logger.info(msgDone);
    }

    /**
     * Perform a full reindexing of the given transaction in the context of a completely
     * new transaction.
     *
     * @param txnId the transaction identifier
     */
    public void reindexTransaction(final long txnId)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Reindexing transaction: " + txnId);
        }
        TransactionWork<Object> reindexWork = new TransactionWork<Object>()
        {
            public Object doWork() throws Exception
            {
                // get the node references pertinent to the transaction
                List<NodeRef> nodeRefs = nodeDaoService.getTxnChanges(txnId);
                // reindex each node
                for (NodeRef nodeRef : nodeRefs)
                {
                    Status nodeStatus = nodeService.getNodeStatus(nodeRef);
                    if (nodeStatus == null)
                    {
                        // it's not there any more
                        continue;
                    }
                    if (nodeStatus.isDeleted())                 // node deleted
                    {
                        // only the child node ref is relevant
                        ChildAssociationRef assocRef = new ChildAssociationRef(
                                ContentModel.ASSOC_CHILDREN,
                                null,
                                null,
                                nodeRef);
                        indexer.deleteNode(assocRef);
                    }
                    else                                        // node created
                    {
                        // get the primary assoc for the node
                        ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef);
                        // reindex
                        indexer.createNode(primaryAssocRef);
                    }
                }
                // done
                return null;
            }
        };
        TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork, true);
        // done
    }

    private boolean isTxnIdPresentInIndex(StoreRef storeRef, Transaction txn)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Checking for transaction in index: \n" +
                    "   store: " + storeRef + "\n" +
                    "   txn: " + txn);
        }
        String changeTxnId = txn.getChangeTxnId();
        // count the changes in the transaction
        int updateCount = nodeDaoService.getTxnUpdateCountForStore(storeRef, txn.getId());
        int deleteCount = nodeDaoService.getTxnDeleteCountForStore(storeRef, txn.getId());
        if (logger.isDebugEnabled())
        {
            logger.debug("Transaction has " + updateCount + " updates and " + deleteCount + " deletes: " + txn);
        }
        // do the update check first, which is the most common case
        if (deleteCount == 0 && updateCount == 0)
        {
            if (logger.isDebugEnabled())
            {
                logger.debug("No changes in transaction: " + txn);
            }
            // there's nothing to check for
            return true;
        }
        else if (updateCount > 0)
        {
            ResultSet results = null;
            try
            {
                SearchParameters sp = new SearchParameters();
                sp.addStore(storeRef);
                // search for it in the index, sorting with youngest first, fetching only 1
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.setQuery("TX:" + LuceneQueryParser.escape(changeTxnId));
                sp.setLimit(1);
                results = searcher.query(sp);
                // did the index have any of these changes?
                if (results.length() > 0)
                {
                    // the transaction has an entry in the index - assume that it was
                    // atomically correct
                    if (logger.isDebugEnabled())
                    {
                        logger.debug("Index has results for txn (OK): " + txn);
                    }
                    return true;    // there were updates/creates and results for the txn were found
                }
                else
                {
                    if (logger.isDebugEnabled())
                    {
                        logger.debug("Index has no results for txn (Index out of date): " + txn);
                    }
                    return false;
                }
            }
            finally
            {
                if (results != null) { results.close(); }
            }
        }
        // there have been deletes, so we have to ensure that none of the nodes deleted are present in the index
        // get all node refs for the transaction
        Long txnId = txn.getId();
        List<NodeRef> nodeRefs = nodeDaoService.getTxnChangesForStore(storeRef, txnId);
        for (NodeRef nodeRef : nodeRefs)
        {
            if (logger.isDebugEnabled())
            {
                logger.debug("Searching for node in index: \n" +
                        "   node: " + nodeRef + "\n" +
                        "   txn: " + txn);
            }
            // we know that these are all deletions
            ResultSet results = null;
            try
            {
                SearchParameters sp = new SearchParameters();
                sp.addStore(storeRef);
                // search for it in the index, sorting with youngest first, fetching only 1
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.setQuery("ID:" + LuceneQueryParser.escape(nodeRef.toString()));
                sp.setLimit(1);
                results = searcher.query(sp);
                if (results.length() == 0)
                {
                    // no results, as expected
                    if (logger.isDebugEnabled())
                    {
                        logger.debug(" --> Node not found (OK)");
                    }
                    continue;
                }
                else
                {
                    if (logger.isDebugEnabled())
                    {
                        logger.debug(" --> Node found (Index out of date)");
                    }
                    return false;
                }
            }
            finally
            {
                if (results != null) { results.close(); }
            }
        }
        // all tests passed
        if (logger.isDebugEnabled())
        {
            logger.debug("Index is in synch with transaction: " + txn);
        }
        return true;
    }
}
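The 10% progress report in performFullRecovery above works by mapping the processed count onto a 0-10 scale and watching for integer boundary crossings. A minimal, self-contained sketch of the same arithmetic (the transaction count of 25 is hypothetical, chosen only for illustration) shows the floor comparison firing exactly once per 10% step:

public class ProgressReportDemo
{
    public static void main(String[] args)
    {
        int txnCount = 25;          // hypothetical total, for illustration only
        int processedCount = 0;
        for (int i = 0; i < txnCount; i++)
        {
            // same arithmetic as performFullRecovery: map progress onto a 0 - 10 scale
            double before = (double) processedCount / (double) txnCount * 10.0;
            processedCount++;
            double after = (double) processedCount / (double) txnCount * 10.0;
            if (Math.floor(before) < Math.floor(after))
            {
                // crossed an integer boundary, i.e. another 10% is complete
                int complete = ((int) Math.floor(after)) * 10;
                System.out.println("Recovery progress: " + complete + "%");
            }
        }
    }
}

Running this prints ten lines, 10% through 100%, regardless of whether txnCount divides evenly into ten batches.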

View File

@ -16,24 +16,8 @@
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
@ -46,114 +30,35 @@ public class FullIndexRecoveryComponentTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
    private FullIndexRecoveryComponent indexRecoverer;

    public void setUp() throws Exception
    {
        indexRecoverer = (FullIndexRecoveryComponent) ctx.getBean("indexRecoveryComponent");
    }

    public void testSetup() throws Exception
    {
    }

    public synchronized void testReindexing() throws Exception
    {
        indexRecoverer.setRecoveryMode(FullIndexRecoveryComponent.RecoveryMode.FULL.name());
        // reindex
        Thread reindexThread = new Thread()
        {
            public void run()
            {
                indexRecoverer.reindex();
            }
        };
        reindexThread.setDaemon(true);
        reindexThread.start();
        // wait a bit and then terminate
        wait(10000);
        indexRecoverer.setShutdown(true);
        wait(10000);
    }
}

View File

@ -1,741 +1,141 @@
///*
// * Copyright (C) 2005-2006 Alfresco, Inc.
// *
// * Licensed under the Mozilla Public License version 1.1
// * with a permitted attribution clause. You may obtain a
// * copy of the License at
// *
// * http://www.alfresco.org/legal/license.txt
// *
// * Unless required by applicable law or agreed to in writing,
// * software distributed under the License is distributed on an
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// * either express or implied. See the License for the specific
// * language governing permissions and limitations under the
// * License.
// */
//package org.alfresco.repo.node.index;
//
//import java.util.ArrayList;
//import java.util.List;
//
//import org.alfresco.error.AlfrescoRuntimeException;
//import org.alfresco.model.ContentModel;
//import org.alfresco.repo.domain.NodeStatus;
//import org.alfresco.repo.search.Indexer;
//import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
//import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
//import org.alfresco.repo.transaction.TransactionUtil;
//import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
//import org.alfresco.service.cmr.repository.ChildAssociationRef;
//import org.alfresco.service.cmr.repository.NodeRef;
//import org.alfresco.service.cmr.repository.NodeService;
//import org.alfresco.service.cmr.repository.StoreRef;
//import org.alfresco.service.cmr.search.ResultSet;
//import org.alfresco.service.cmr.search.SearchParameters;
//import org.alfresco.service.cmr.search.SearchService;
//import org.alfresco.service.transaction.TransactionService;
//import org.apache.commons.logging.Log;
//import org.apache.commons.logging.LogFactory;
//import org.hibernate.CacheMode;
//import org.hibernate.Query;
//import org.hibernate.Session;
//import org.springframework.orm.hibernate3.HibernateCallback;
//import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
//
///**
// * Ensures that the FTS indexing picks up on any outstanding documents that
// * require indexing.
// * <p>
// * This component must be used as a singleton (one per VM) and may only be
// * called to reindex once. It will start a thread that processes all available
// * transactions and keeps checking to ensure that the index is up to date with
// * the latest database changes.
// * <p>
// * <b>The following points are important:</b>
// * <ul>
// * <li>
// * By default, the Hibernate L2 cache is used during processing.
// * This can be disabled by either disabling the L2 cache globally
// * for the server (not recommended) or by setting the
// * {@link #setL2CacheMode(String) l2CacheMode} property. If the
// * database is static then the L2 cache usage can be set to use
// * the <code>NORMAL</code> mode. <code>REFRESH</code> should be
// * used where the server will still be accessed from some clients
// * despite the database changing. <code>NORMAL</code> can be used
// * in the case of the caches being clustered, i.e. the caches will
// * not be out of date w.r.t. the database.
// * </li>
// * <li>
// * This process should only be used continuously where the index
// * transactions are following the database transactions. Use the
// * {@link #setRunContinuously(boolean) runContinuously} property
// * to change this behaviour.
// * </li>
// * </ul>
// *
// * @author Derek Hulley
// */
//public class MissingContentReindexComponent extends HibernateDaoSupport implements IndexRecovery
//{
// public static final String QUERY_GET_NEXT_CHANGE_TXN_IDS = "node.GetNextChangeTxnIds";
// public static final String QUERY_GET_CHANGED_NODE_STATUSES = "node.GetChangedNodeStatuses";
// public static final String QUERY_GET_DELETED_NODE_STATUSES = "node.GetDeletedNodeStatuses";
// public static final String QUERY_GET_CHANGED_NODE_STATUSES_COUNT = "node.GetChangedNodeStatusesCount";
//
// private static final String START_TXN_ID = "000";
//
// private static Log logger = LogFactory.getLog(FullIndexRecoveryComponent.class);
//
// /** ensures that this process is kicked off once per VM */
// private static boolean started = false;
// /** The current transaction ID being processed */
// private static String currentTxnId = START_TXN_ID;
// /** kept to notify the thread that it should quite */
// private boolean killThread = false;
//
// /** provides transactions to atomically index each missed transaction */
// private TransactionService transactionService;
// /** the component to index the node hierarchy */
// private Indexer indexer;
// /** the FTS indexer that we will prompt to pick up on any un-indexed text */
// private FullTextSearchIndexer ftsIndexer;
// /** the component providing searches of the indexed nodes */
// private SearchService searcher;
// /** the component giving direct access to <b>node</b> instances */
// private NodeService nodeService;
// /** set this to run the index recovery component */
// private boolean executeFullRecovery;
// /** set this on to keep checking for new transactions and never stop */
// private boolean runContinuously;
// /** set the time to wait between checking indexes */
// private long waitTime;
// /** controls how the L2 cache is used */
// private CacheMode l2CacheMode;
//
// /**
// * @return Returns the ID of the current (or last) transaction processed
// */
// public static String getCurrentTransactionId()
// {
// return currentTxnId;
// }
//
// public FullIndexRecoveryComponent()
// {
// this.killThread = false;
// this.executeFullRecovery = false;
// this.runContinuously = false;
// this.waitTime = 1000L;
// this.l2CacheMode = CacheMode.REFRESH;
//
// // ensure that we kill the thread when the VM is shutting down
// Runnable shutdownRunnable = new Runnable()
// {
// public void run()
// {
// killThread = true;
// };
// };
// Thread shutdownThread = new Thread(shutdownRunnable);
// Runtime.getRuntime().addShutdownHook(shutdownThread);
// }
//
// /**
// * @return Returns true if the component has already been started
// */
// public static boolean isStarted()
// {
// return started;
// }
//
// /**
// * @param transactionService provide transactions to index each missed transaction
// */
// public void setTransactionService(TransactionService transactionService)
// {
// this.transactionService = transactionService;
// }
//
// /**
// * @param indexer the indexer that will be index
// */
// public void setIndexer(Indexer indexer)
// {
// this.indexer = indexer;
// }
//
// /**
// * @param ftsIndexer the FTS background indexer
// */
// public void setFtsIndexer(FullTextSearchIndexer ftsIndexer)
// {
// this.ftsIndexer = ftsIndexer;
// }
//
// /**
// * @param searcher component providing index searches
// */
// public void setSearcher(SearchService searcher)
// {
// this.searcher = searcher;
// }
//
// /**
// * @param nodeService provides information about nodes for indexing
// */
// public void setNodeService(NodeService nodeService)
// {
// this.nodeService = nodeService;
// }
//
// /**
// * Set this to <code>true</code> to initiate the full index recovery.
// * <p>
// * This used to default to <code>true</code> but is now false. Set this
// * if the potentially long-running process of checking and fixing the
// * indexes must be started.
// *
// * @param executeFullRecovery
// */
// public void setExecuteFullRecovery(boolean executeFullRecovery)
// {
// this.executeFullRecovery = executeFullRecovery;
// }
//
// /**
// * Set this to ensure that the process continuously checks for new transactions.
// * If not, it will permanently terminate once it catches up with the current
// * transactions.
// *
// * @param runContinuously true to never cease looking for new transactions
// */
// public void setRunContinuously(boolean runContinuously)
// {
// this.runContinuously = runContinuously;
// }
//
// /**
// * Set the time to wait between checking for new transaction changes in the database.
// *
// * @param waitTime the time to wait in milliseconds
// */
// public void setWaitTime(long waitTime)
// {
// this.waitTime = waitTime;
// }
//
// /**
// * Set the hibernate cache mode by name
// *
// * @see org.hibernate.CacheMode
// */
// public void setL2CacheMode(String l2CacheModeStr)
// {
// if (l2CacheModeStr.equals("GET"))
// {
// l2CacheMode = CacheMode.GET;
// }
// else if (l2CacheModeStr.equals("IGNORE"))
// {
// l2CacheMode = CacheMode.IGNORE;
// }
// else if (l2CacheModeStr.equals("NORMAL"))
// {
// l2CacheMode = CacheMode.NORMAL;
// }
// else if (l2CacheModeStr.equals("PUT"))
// {
// l2CacheMode = CacheMode.PUT;
// }
// else if (l2CacheModeStr.equals("REFRESH"))
// {
// l2CacheMode = CacheMode.REFRESH;
// }
// else
// {
// throw new IllegalArgumentException("Unrecognised Hibernate L2 cache mode: " + l2CacheModeStr);
// }
// }
//
// /**
// * Ensure that the index is up to date with the current state of the persistence layer.
// * The full list of unique transaction change IDs is retrieved and used to detect
// * which are not present in the index. All the node changes and deletions for the
// * remaining transactions are then indexed.
// */
// public synchronized void reindex()
// {
// if (FullIndexRecoveryComponent.started)
// {
// throw new AlfrescoRuntimeException
// ("Only one FullIndexRecoveryComponent may be used per VM and it may only be called once");
// }
//
// // ensure that we don't redo this work
// FullIndexRecoveryComponent.started = true;
//
// // work to mark the stores for full text reindexing
// TransactionWork<Object> ftsReindexWork = new TransactionWork<Object>()
// {
// public Object doWork()
// {
// List<StoreRef> storeRefs = nodeService.getStores();
// // reindex each store
// for (StoreRef storeRef : storeRefs)
// {
// // check if the store exists
// if (!nodeService.exists(storeRef))
// {
// // store does not exist
// if (logger.isDebugEnabled())
// {
// logger.debug("Skipping reindex of non-existent store: " + storeRef);
// }
// continue;
// }
//
// // prompt FTS to reindex the store
// ftsIndexer.requiresIndex(storeRef);
// }
// // done
// if (logger.isDebugEnabled())
// {
// logger.debug("Prompted FTS index on stores: " + storeRefs);
// }
// return null;
// }
// };
// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, ftsReindexWork);
//
// // start full index recovery, if necessary
// if (!this.executeFullRecovery)
// {
// if (logger.isDebugEnabled())
// {
// logger.debug("Full index recovery is off - quitting");
// }
// }
// else
// {
// // set the state of the reindex
// FullIndexRecoveryComponent.currentTxnId = START_TXN_ID;
//
// // start a stateful thread that will begin processing the reindexing the transactions
// Runnable runnable = new ReindexRunner();
// Thread reindexThread = new Thread(runnable);
// // make it a daemon thread
// reindexThread.setDaemon(true);
// // it should not be a high priority
// reindexThread.setPriority(Thread.MIN_PRIORITY);
// // start it
// reindexThread.start();
//
// if (logger.isDebugEnabled())
// {
// logger.debug("Full index recovery thread started: \n" +
// " continuous: " + runContinuously);
// }
// }
// }
//
// /**
// * Stateful thread runnable that executes reindex calls.
// *
// * @see FullIndexRecoveryComponent#reindexNodes()
// *
// * @author Derek Hulley
// */
// private class ReindexRunner implements Runnable
// {
// public void run()
// {
// // keep this thread going permanently
// while (!killThread)
// {
// try
// {
// // reindex nodes
// List<String> txnsIndexed = FullIndexRecoveryComponent.this.reindexNodes();
// // reindex missing content
// @SuppressWarnings("unused")
// int missingContentCount = FullIndexRecoveryComponent.this.reindexMissingContent();
// // check if the process should terminate
// if (txnsIndexed.size() == 0 && !runContinuously)
// {
// // the thread has caught up with all the available work and should not
// // run continuously
// if (logger.isDebugEnabled())
// {
// logger.debug("Thread quitting - no more available indexing to do: \n" +
// " last txn: " + FullIndexRecoveryComponent.getCurrentTransactionId());
// }
// break;
// }
// // brief pause
// synchronized(FullIndexRecoveryComponent.this)
// {
// FullIndexRecoveryComponent.this.wait(waitTime);
// }
// }
// catch (InterruptedException e)
// {
// // ignore
// }
// catch (Throwable e)
// {
// if (killThread)
// {
// // the shutdown may have caused the exception - ignore it
// }
// else
// {
// // we are still a go; report it
// logger.error("Reindex failure", e);
// }
// }
// }
// }
// }
//
// /**
// * @return Returns the number of documents reindexed
// */
// private int reindexMissingContent()
// {
// int count = 0;
// for (StoreRef storeRef : storeRefs)
// {
// count += reindexMissingContent(storeRef);
// }
// return count;
// }
//
// /**
// * @param storeRef the store to check for missing content
// * @return Returns the number of documents reindexed
// */
// private int reindexMissingContent(StoreRef storeRef)
// {
// SearchParameters sp = new SearchParameters();
// sp.addStore(storeRef);
//
// // search for it in the index
// String query = "TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING;
// sp.setLanguage(SearchService.LANGUAGE_LUCENE);
// sp.setQuery(query);
// ResultSet results = null;
// try
// {
// results = searcher.query(sp);
//
// int count = 0;
// // loop over the results and get the details of the nodes that have missing content
// List<ChildAssociationRef> assocRefs = results.getChildAssocRefs();
// for (ChildAssociationRef assocRef : assocRefs)
// {
// final NodeRef childNodeRef = assocRef.getChildRef();
// // prompt for a reindex - it might fail again, but we just keep plugging away
// TransactionWork<Object> reindexWork = new TransactionWork<Object>()
// {
// public Object doWork()
// {
// indexer.updateNode(childNodeRef);
// return null;
// }
// };
// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork);
// count++;
// }
// // done
// if (logger.isDebugEnabled())
// {
// logger.debug("Reindexed missing content: \n" +
// " store: " + storeRef + "\n" +
// " node count: " + count);
// }
// return count;
// }
// finally
// {
// if (results != null)
// {
// results.close();
// }
// }
// }
//
// /**
// * @return Returns the transaction ID just reindexed, i.e. where some work was performed
// */
// private List<String> reindexNodes()
// {
// // get a list of all transactions still requiring a check
// List<String> txnsToCheck = getNextChangeTxnIds(FullIndexRecoveryComponent.currentTxnId);
//
// // loop over each transaction
// for (String changeTxnId : txnsToCheck)
// {
// reindexNodes(changeTxnId);
// }
//
// // done
// return txnsToCheck;
// }
//
// /**
// * Reindexes changes specific to the change transaction ID.
// * <p>
// * <b>All exceptions are absorbed.</b>
// */
// private void reindexNodes(final String changeTxnId)
// {
// /*
// * This must execute each within its own transaction.
// * The cache size is therefore not an issue.
// */
// TransactionWork<Object> reindexWork = new TransactionWork<Object>()
// {
// public Object doWork() throws Exception
// {
// // perform the work in a Hibernate callback
// HibernateCallback callback = new ReindexCallback(changeTxnId);
// getHibernateTemplate().execute(callback);
// // done
// return null;
// }
// };
// try
// {
// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork);
// }
// catch (Throwable e)
// {
// logger.error("Transaction reindex failed: \n" +
// " txn: " + changeTxnId,
// e);
// }
// finally
// {
// // Up the current transaction now, in case the process fails at this point.
// // This will prevent the transaction from being processed again.
// // This applies to failures as well, which should be dealt with externally
// // and having the entire process start again, e.g. such as a system reboot
// currentTxnId = changeTxnId;
// }
// }
//
// /**
// * Stateful inner class that implements a single reindex call for a given store
// * and transaction.
// * <p>
// * It must be called within its own transaction.
// *
// * @author Derek Hulley
// */
// private class ReindexCallback implements HibernateCallback
// {
// private final String changeTxnId;
//
// public ReindexCallback(String changeTxnId)
// {
// this.changeTxnId = changeTxnId;
// }
//
// /**
// * Changes the L2 cache usage before reindexing for each store
// *
// * @see #reindexNodes(StoreRef, String)
// */
// public Object doInHibernate(Session session)
// {
// // set the way the L2 cache is used
// getSession().setCacheMode(l2CacheMode);
//
// // reindex each store
// for (StoreRef storeRef : storeRefs)
// {
// if (!nodeService.exists(storeRef))
// {
// // the store is not present
// continue;
// }
// // reindex for store
// reindexNodes(storeRef, changeTxnId);
// }
// // done
// return null;
// }
//
// private void reindexNodes(StoreRef storeRef, String changeTxnId)
// {
// // check if we need to perform this operation
// SearchParameters sp = new SearchParameters();
// sp.addStore(storeRef);
//
// // search for it in the index
// String query = "TX:\"" + changeTxnId + "\"";
// sp.setLanguage(SearchService.LANGUAGE_LUCENE);
// sp.setQuery(query);
// ResultSet results = null;
// try
// {
// results = searcher.query(sp);
// // did the index have any of these changes?
// if (results.length() > 0)
// {
// // the transaction has an entry in the index - assume that it was
// // atomically correct
// if (logger.isDebugEnabled())
// {
// logger.debug("Transaction present in index - no indexing required: \n" +
// " store: " + storeRef + "\n" +
// " txn: " + changeTxnId);
// }
// return;
// }
// }
// finally
// {
// if (results != null)
// {
// results.close();
// }
// }
// // the index has no record of this
// // were there any changes, or is it all just deletions?
// int changedCount = getChangedNodeStatusesCount(storeRef, changeTxnId);
// if (changedCount == 0)
// {
// // no nodes were changed in the transaction, i.e. they are only deletions
// // the index is quite right not to have any entries for the transaction
// if (logger.isDebugEnabled())
// {
// logger.debug("Transaction only has deletions - no indexing required: \n" +
// " store: " + storeRef + "\n" +
// " txn: " + changeTxnId);
// }
// return;
// }
//
// // process the deletions relevant to the txn and the store
// List<NodeStatus> deletedNodeStatuses = getDeletedNodeStatuses(storeRef, changeTxnId);
// for (NodeStatus status : deletedNodeStatuses)
// {
// NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid());
// // only the child node ref is relevant
// ChildAssociationRef assocRef = new ChildAssociationRef(
// ContentModel.ASSOC_CHILDREN,
// null,
// null,
// nodeRef);
// indexer.deleteNode(assocRef);
// }
//
// // process additions
// List<NodeStatus> changedNodeStatuses = getChangedNodeStatuses(storeRef, changeTxnId);
// for (NodeStatus status : changedNodeStatuses)
// {
// NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid());
// // get the primary assoc for the node
// ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef);
// // reindex
// indexer.createNode(primaryAssocRef);
// }
//
// // done
// if (logger.isDebugEnabled())
// {
// logger.debug("Transaction reindexed: \n" +
// " store: " + storeRef + "\n" +
// " txn: " + changeTxnId + "\n" +
// " deletions: " + deletedNodeStatuses.size() + "\n" +
// " modifications: " + changedNodeStatuses.size());
// }
// }
// };
//
// /**
// * Retrieve all transaction IDs that are greater than the given transaction ID.
// *
// * @param currentTxnId the transaction ID that must be less than all returned results
// * @return Returns an ordered list of transaction IDs
// */
// @SuppressWarnings("unchecked")
// public List<String> getNextChangeTxnIds(final String currentTxnId)
// {
// HibernateCallback callback = new HibernateCallback()
// {
// public Object doInHibernate(Session session)
// {
// Query query = session.getNamedQuery(QUERY_GET_NEXT_CHANGE_TXN_IDS);
// query.setString("currentTxnId", currentTxnId)
// .setReadOnly(true);
// return query.list();
// }
// };
// List<String> queryResults = (List<String>) getHibernateTemplate().execute(callback);
// // done
// return queryResults;
// }
//
// @SuppressWarnings("unchecked")
// public int getChangedNodeStatusesCount(final StoreRef storeRef, final String changeTxnId)
// {
// HibernateCallback callback = new HibernateCallback()
// {
// public Object doInHibernate(Session session)
// {
// Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES_COUNT);
// query.setString("storeProtocol", storeRef.getProtocol())
// .setString("storeIdentifier", storeRef.getIdentifier())
// .setString("changeTxnId", changeTxnId)
// .setReadOnly(true);
// return query.uniqueResult();
// }
// };
// Integer changeCount = (Integer) getHibernateTemplate().execute(callback);
// // done
// return changeCount.intValue();
// }
//
// @SuppressWarnings("unchecked")
// public List<NodeStatus> getChangedNodeStatuses(final StoreRef storeRef, final String changeTxnId)
// {
// HibernateCallback callback = new HibernateCallback()
// {
// public Object doInHibernate(Session session)
// {
// Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES);
// query.setString("storeProtocol", storeRef.getProtocol())
// .setString("storeIdentifier", storeRef.getIdentifier())
// .setString("changeTxnId", changeTxnId)
// .setReadOnly(true);
// return query.list();
// }
// };
// List<NodeStatus> queryResults = (List) getHibernateTemplate().execute(callback);
// // done
// return queryResults;
// }
//
// @SuppressWarnings("unchecked")
// public List<NodeStatus> getDeletedNodeStatuses(final StoreRef storeRef, final String changeTxnId)
// {
// HibernateCallback callback = new HibernateCallback()
// {
// public Object doInHibernate(Session session)
// {
// Query query = session.getNamedQuery(QUERY_GET_DELETED_NODE_STATUSES);
// query.setString("storeProtocol", storeRef.getProtocol())
// .setString("storeIdentifier", storeRef.getIdentifier())
// .setString("changeTxnId", changeTxnId)
// .setReadOnly(true);
// return query.list();
// }
// };
// List<NodeStatus> queryResults = (List) getHibernateTemplate().execute(callback);
// // done
// return queryResults;
// }
//}
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.List;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This component attempts to reindex nodes whose content was missing or
* unavailable at the time they were originally indexed.
*
* @author Derek Hulley
*/
public class MissingContentReindexComponent extends AbstractReindexComponent
{
private static Log logger = LogFactory.getLog(MissingContentReindexComponent.class);
/** keep track of whether the FTS indexer thread has been poked */
private boolean ftsIndexerCalled;
public MissingContentReindexComponent()
{
ftsIndexerCalled = false;
}
/**
* If this object is currently busy, then it just does nothing
*/
@Override
public void reindexImpl()
{
List<StoreRef> storeRefs = nodeService.getStores();
int count = 0;
for (StoreRef storeRef : storeRefs)
{
// prompt the FTS reindexing
if (!ftsIndexerCalled)
{
ftsIndexer.requiresIndex(storeRef);
}
// reindex missing content
count += reindexMissingContent(storeRef);
// check if we have to break out
if (isShuttingDown())
{
break;
}
}
// The FTS indexer only needs to be prompted once
ftsIndexerCalled = true;
// done
if (logger.isDebugEnabled())
{
logger.debug("Missing content indexing touched " + count + " content nodes");
}
}
/**
* @param storeRef the store to check for missing content
* @return Returns the number of documents reindexed
*/
private int reindexMissingContent(StoreRef storeRef)
{
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try
{
results = searcher.query(sp);
int count = 0;
// iterate over the nodes and prompt for reindexing
for (ResultSetRow row : results)
{
final NodeRef childNodeRef = row.getNodeRef();
// prompt for a reindex - it might fail again, but we just keep plugging away
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork()
{
indexer.updateNode(childNodeRef);
return null;
}
};
TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork);
// check if we have to break out
if (isShuttingDown())
{
break;
}
}
// done
if (logger.isDebugEnabled())
{
logger.debug(
"Reindexed missing content: \n" +
" store: " + storeRef + "\n" +
" node count: " + count);
}
return count;
}
finally
{
if (results != null)
{
results.close();
}
}
}
}
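The detection step above reduces to a single Lucene query for the special token that the indexer writes into the TEXT field when a content stream is unavailable. A minimal sketch of that query, reusing only types and calls that appear in this file (the helper class and method are hypothetical):

import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;

public class MissingContentQueryDemo
{
    /**
     * Hypothetical helper: counts the nodes in a store whose content was
     * flagged as missing at indexing time.
     */
    public static int countMissingContent(SearchService searcher, StoreRef storeRef)
    {
        SearchParameters sp = new SearchParameters();
        sp.addStore(storeRef);
        sp.setLanguage(SearchService.LANGUAGE_LUCENE);
        // the indexer writes this token into the TEXT field when the stream is absent
        sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
        ResultSet results = null;
        try
        {
            results = searcher.query(sp);
            return results.length();
        }
        finally
        {
            if (results != null) { results.close(); }
        }
    }
}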

View File

@ -0,0 +1,172 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
* @see org.alfresco.repo.node.index.MissingContentReindexComponent
*
* @author Derek Hulley
*/
public class MissingContentReindexComponentTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private AuthenticationComponent authenticationComponent;
private SearchService searchService;
private NodeService nodeService;
private FileFolderService fileFolderService;
private ContentStore contentStore;
private FullTextSearchIndexer ftsIndexer;
private NodeRef rootNodeRef;
private MissingContentReindexComponent reindexer;
@Override
protected void setUp() throws Exception
{
ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
searchService = serviceRegistry.getSearchService();
nodeService = serviceRegistry.getNodeService();
fileFolderService = serviceRegistry.getFileFolderService();
authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponentImpl");
contentStore = (ContentStore) ctx.getBean("fileContentStore");
ftsIndexer = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
Indexer indexer = (Indexer) ctx.getBean("indexerComponent");
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
TransactionService transactionService = serviceRegistry.getTransactionService();
reindexer = new MissingContentReindexComponent();
reindexer.setAuthenticationComponent(authenticationComponent);
reindexer.setFtsIndexer(ftsIndexer);
reindexer.setIndexer(indexer);
reindexer.setNodeDaoService(nodeDaoService);
reindexer.setNodeService(nodeService);
reindexer.setSearcher(searchService);
reindexer.setTransactionComponent((TransactionComponent)transactionService);
// authenticate
authenticationComponent.setSystemUserAsCurrentUser();
// create a root node for the test
StoreRef storeRef = nodeService.createStore("test", getName() + "-" + System.nanoTime());
rootNodeRef = nodeService.getRootNode(storeRef);
rootNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("cm:x"),
ContentModel.TYPE_FOLDER).getChildRef();
}
@Override
protected void tearDown() throws Exception
{
authenticationComponent.clearCurrentSecurityContext();
}
/**
* Creates a node with a content URL that points to missing content, checks
* that indexing flagged the content as missing, prompts a reindex of the
* missing content and checks that the text was subsequently indexed properly.
*/
public synchronized void testReindex() throws Exception
{
// create a node with missing content
String contentUrl = AbstractContentStore.createNewUrl();
ContentData contentData = new ContentData(contentUrl, "text/plain", 0L, "UTF8");
// create the file node
NodeRef nodeRef = fileFolderService.create(rootNodeRef, "myfile", ContentModel.TYPE_CONTENT).getNodeRef();
// add the content
nodeService.setProperty(nodeRef, ContentModel.PROP_CONTENT, contentData);
// wait a bit for the indexing
ftsIndexer.index();
wait(1000);
// check that the node was indexed, but that the content was M.I.A.
SearchParameters sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING);
sp.addSort(SearchParameters.SORT_IN_DOCUMENT_ORDER_DESCENDING);
ResultSet results = null;
try
{
results = searchService.query(sp);
assertTrue("Content missing NICM not found", results.length() == 1);
}
finally
{
if (results != null) { results.close(); }
}
// now put some content in the store
ContentWriter writer = contentStore.getWriter(null, contentUrl);
writer.setMimetype("text/plain");
writer.setEncoding("UTF8");
writer.putContent("123abc456def");
// prompt for reindex
reindexer.reindex();
// wait for it to have been indexed again
ftsIndexer.index();
wait(1000);
// search for the text
sp = new SearchParameters();
sp.addStore(rootNodeRef.getStoreRef());
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TEXT:" + "123abc456def");
sp.addSort("@" + ContentModel.PROP_CREATED, false);
results = null;
try
{
results = searchService.query(sp);
assertTrue("Indexed content node found", results.length() == 1);
}
finally
{
if (results != null) { results.close(); }
}
}
}

View File

@ -42,6 +42,11 @@ public class ClosingIndexSearcher extends IndexSearcher
this.reader = r;
}
/*package*/ IndexReader getReader()
{
return reader;
}
@Override
public void close() throws IOException
{

View File

@ -145,7 +145,7 @@ public abstract class LuceneBase2
}
}
protected Searcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException
protected ClosingIndexSearcher getSearcher(LuceneIndexer2 luceneIndexer) throws LuceneIndexException
{
// If we know the delta id we should do better

View File

@ -43,7 +43,6 @@ import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo.LockWork;
import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@ -1316,7 +1315,7 @@ public class LuceneIndexerImpl2 extends LuceneBase2 implements LuceneIndexer2
if (includeDirectoryDocuments)
{
if (nodeTypeDef.getChildAssociations().size() > 0)
if (nodeTypeDef != null && nodeTypeDef.getChildAssociations().size() > 0)
{
if (directPaths.contains(pair.getFirst()))
{

View File

@ -59,17 +59,13 @@ import org.saxpath.SAXPathException;
import com.werken.saxpath.XPathReader;
/**
* The Lucene implementation of Searcher At the moment we support only lucene
* based queries.
*
* TODO: Support for other query languages
* The Lucene implementation of Searcher At the moment we support only lucene based queries. TODO: Support for other query languages
*
* @author andyh
*
*/
public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
{
/**
* Default field name
*/
@ -90,9 +86,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
*/
/**
* Get an initialised searcher for the store and transaction Normally we do
* not search against a a store and delta. Currently only gets the searcher
* against the main index.
* Get an initialised searcher for the store and transaction Normally we do not search against a a store and delta. Currently only gets the searcher against the main index.
*
* @param storeRef
* @param deltaId
@ -115,9 +109,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
}
/**
* Get an intialised searcher for the store. No transactional ammendsmends
* are searched.
*
* Get an intialised searcher for the store. No transactional ammendsmends are searched.
*
* @param storeRef
* @return
@ -134,7 +126,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
public boolean indexExists()
{
//return mainIndexExists();
// return mainIndexExists();
return true;
}
@ -220,7 +212,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
Query query = LuceneQueryParser.parse(parameterisedQueryString, DEFAULT_FIELD, new LuceneAnalyser(
dictionaryService), namespacePrefixResolver, dictionaryService, defaultOperator);
Searcher searcher = getSearcher(indexer);
ClosingIndexSearcher searcher = getSearcher(indexer);
if (searcher == null)
{
// no index return an empty result set
@ -238,7 +230,14 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
switch (sd.getSortType())
{
case FIELD:
fields[index++] = new SortField(sd.getField(), !sd.isAscending());
if (searcher.getReader().getFieldNames().contains(sd.getField()))
{
fields[index++] = new SortField(sd.getField(), !sd.isAscending());
}
else
{
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
}
break;
case DOCUMENT:
fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
@ -348,8 +347,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
}
/**
* The definitions must provide a default value, or of not there must be a
* parameter to provide the value
* The definitions must provide a default value, or of not there must be a parameter to provide the value
*
* @param definition
* @param queryParameters
@ -396,12 +394,9 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
}
/*
* Parameterise the query string - not sure if it is required to escape
* lucence spacials chars The parameters could be used to build the query -
* the contents of parameters should alread have been escaped if required.
* ... mush better to provide the parameters and work out what to do TODO:
* conditional query escapement - may be we should have a parameter type
* that is not escaped
* Parameterise the query string - not sure if it is required to escape lucence spacials chars The parameters could be used to build the query - the contents of parameters
* should alread have been escaped if required. ... mush better to provide the parameters and work out what to do TODO: conditional query escapement - may be we should have a
* parameter type that is not escaped
*/
private String parameterise(String unparameterised, Map<QName, QueryParameterDefinition> map,
QueryParameter[] queryParameters, NamespacePrefixResolver nspr) throws QueryParameterisationException
@ -567,7 +562,6 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
/**
* @return Returns true if the pattern is present, otherwise false.
*
* @see #setIndexer(Indexer)
* @see #setSearcher(SearchService)
*/
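The sort change in this file guards against Lucene failing when a requested sort field was never indexed. Condensed into a standalone helper (the method name is illustrative; the calls mirror the diff above, including the ClosingIndexSearcher.getReader() accessor added in this commit):

    // Illustrative helper mirroring the fallback above: sort on the requested
    // field only if the index reader has actually seen it, otherwise fall back
    // to document order rather than failing the query.
    private SortField buildSortField(ClosingIndexSearcher searcher, String field, boolean ascending)
    {
        if (searcher.getReader().getFieldNames().contains(field))
        {
            return new SortField(field, !ascending);
        }
        else
        {
            return new SortField(null, SortField.DOC, !ascending);
        }
    }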

View File

@ -381,6 +381,10 @@ public class LuceneTest2 extends TestCase
super(arg0);
}
public void firstTest() throws Exception
{
testSort();
}
public void test0() throws Exception
{
@ -1063,6 +1067,17 @@ public class LuceneTest2 extends TestCase
results.close();
luceneFTS.resume();
SearchParameters sp17 = new SearchParameters();
sp17.addStore(rootNodeRef.getStoreRef());
sp17.setLanguage(SearchService.LANGUAGE_LUCENE);
sp17.setQuery("PATH:\"//.\"");
sp17.addSort("cabbage", false);
results = searcher.query(sp17);
results.close();
luceneFTS.resume();
}
public void test1() throws Exception

View File

@ -531,6 +531,9 @@ public class NTLMAuthenticationComponentImpl extends AbstractAuthenticationCompo
authSess = m_passthruServers.openSession();
if ( authSess == null)
throw new AuthenticationException("Failed to open session to passthru server");
// Authenticate using the credentials supplied
authenticateLocal(ntlmToken, authSess);
@ -748,6 +751,11 @@ public class NTLMAuthenticationComponentImpl extends AbstractAuthenticationCompo
authSess = m_passthruServers.openSession();
// Check if the session was opened to the passthru server
if ( authSess == null)
throw new AuthenticationServiceException("Failed to open passthru auth session");
ntlmToken.setAuthenticationExpireTime(System.currentTimeMillis() + getSessionTimeout());
// Get the challenge from the initial session negotiate stage

View File

@ -26,30 +26,34 @@ import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.security.PermissionService;
import org.springframework.beans.factory.InitializingBean;
/**
* LockOwnerDynamicAuthority
*/
public class LockOwnerDynamicAuthority implements DynamicAuthority, InitializingBean
{
private LockService lockService;
private NodeService nodeService;
public LockOwnerDynamicAuthority()
{
super();
}
public boolean hasAuthority(NodeRef nodeRef, String userName)
{
if(lockService.getLockStatus(nodeRef) == LockStatus.LOCK_OWNER)
if (lockService.getLockStatus(nodeRef) == LockStatus.LOCK_OWNER)
{
return true;
}
if(nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY))
if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY))
{
NodeRef originial = DefaultTypeConverter.INSTANCE.convert(NodeRef.class, nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE));
return (lockService.getLockStatus(originial) == LockStatus.LOCK_OWNER);
NodeRef original = DefaultTypeConverter.INSTANCE.convert(
NodeRef.class, nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE));
if (nodeService.exists(original))
{
return (lockService.getLockStatus(original) == LockStatus.LOCK_OWNER);
}
else
{
return false;
}
}
else
{
@@ -80,12 +84,8 @@ public class LockOwnerDynamicAuthority implements DynamicAuthority, Initializing
this.lockService = lockService;
}
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
}

View File

@@ -43,6 +43,7 @@ import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.OwnableService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.version.VersionService;
import org.alfresco.service.cmr.view.ExporterService;
import org.alfresco.service.cmr.view.ImporterService;
@@ -340,6 +341,13 @@ public class ServiceDescriptorRegistry
{
return (OwnableService)getService(OWNABLE_SERVICE);
}
/* (non-Javadoc)
* @see org.alfresco.service.ServiceRegistry#getPersonService()
*/
public PersonService getPersonService()
{
return (PersonService)getService(PERSON_SERVICE);
}
}

View File

@@ -45,6 +45,10 @@ public class DummyTransactionService implements TransactionService
return false;
}
public void setReadOnly(boolean readOnly)
{
}
public UserTransaction getUserTransaction()
{
return txn;

View File

@@ -60,6 +60,7 @@ public interface WorkflowModel
// workflow package
static final QName ASPECT_WORKFLOW_PACKAGE = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowPackage");
static final QName PROP_IS_SYSTEM_PACKAGE = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "isSystemPackage");
static final QName PROP_WORKFLOW_DEFINITION_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionId");
static final QName PROP_WORKFLOW_DEFINITION_NAME = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowDefinitionName");
static final QName PROP_WORKFLOW_INSTANCE_ID = QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "workflowInstanceId");

View File

@@ -40,6 +40,16 @@ public interface WorkflowPackageComponent
*/
public NodeRef createPackage(NodeRef container);
/**
* Deletes a Workflow Package
*
* The workflow package aspect is removed, and if the container was previously created by the workflow
* service (i.e. not provided from elsewhere), it will be deleted.
*
* @param container
*/
public void deletePackage(NodeRef container);
// TODO: Further support for finding packages via meta-data of WorkflowPackage aspect
/**

View File

@@ -89,6 +89,7 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
public NodeRef createPackage(NodeRef container)
{
// create a container, if one is not specified
boolean isSystemPackage = false;
if (container == null)
{
// create simple folder in workflow system folder
@@ -112,6 +113,7 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
QName qname = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, containerName);
ChildAssociationRef childRef = nodeService.createNode(packages, ContentModel.ASSOC_CONTAINS, qname, ContentModel.TYPE_SYSTEM_FOLDER);
container = childRef.getChildRef();
isSystemPackage = true;
}
// attach workflow package
@@ -120,11 +122,31 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
throw new WorkflowException("Container '" + container + "' is already a workflow package.");
}
nodeService.addAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE, null);
nodeService.setProperty(container, WorkflowModel.PROP_IS_SYSTEM_PACKAGE, isSystemPackage);
// return container
return container;
}
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowPackageComponent#deletePackage(org.alfresco.service.cmr.repository.NodeRef)
*/
public void deletePackage(NodeRef container)
{
if (container != null && nodeService.exists(container) && nodeService.hasAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE))
{
Boolean isSystemPackage = (Boolean)nodeService.getProperty(container, WorkflowModel.PROP_IS_SYSTEM_PACKAGE);
if (isSystemPackage != null && isSystemPackage.booleanValue())
{
nodeService.deleteNode(container);
}
else
{
nodeService.removeAspect(container, WorkflowModel.ASPECT_WORKFLOW_PACKAGE);
}
}
}
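For illustration, the resulting lifecycle from a caller's point of view (a sketch only; 'packageComponent' and 'existingFolder' are assumed to be in scope):

// Component-created container: flagged as a system package, so the node is deleted.
NodeRef systemPackage = packageComponent.createPackage(null);
packageComponent.deletePackage(systemPackage);

// Caller-supplied container: only the workflow package aspect is removed.
NodeRef suppliedPackage = packageComponent.createPackage(existingFolder);
packageComponent.deletePackage(suppliedPackage);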
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowPackageComponent#getWorkflowIdsForContent(org.alfresco.service.cmr.repository.NodeRef, boolean)
*/
@@ -150,7 +172,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
}
return workflowIds;
}
/**
* Gets the system workflow container for storing workflow related items
@@ -170,7 +191,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
}
return systemWorkflowContainer;
}
/**
* Finds the system workflow container
@@ -193,7 +213,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
return systemWorkflowContainer;
}
/**
* Finds the system container
*
@@ -214,7 +233,6 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
}
return nodeRefs.get(0);
}
/**
* Creates the System Workflow Container

View File

@@ -210,6 +210,8 @@ public class WorkflowServiceImpl implements WorkflowService
*/
public WorkflowInstance cancelWorkflow(String workflowId)
{
WorkflowInstance instance = getWorkflowById(workflowId);
workflowPackageComponent.deletePackage(instance.workflowPackage);
String engineId = BPMEngineRegistry.getEngineId(workflowId);
WorkflowComponent component = getWorkflowComponent(engineId);
return component.cancelWorkflow(workflowId);
@@ -319,7 +321,7 @@ public class WorkflowServiceImpl implements WorkflowService
String engineId = BPMEngineRegistry.getEngineId(workflowId);
WorkflowComponent component = getWorkflowComponent(engineId);
WorkflowInstance instance = component.getWorkflowById(workflowId);
if (instance.active == active)
if (instance != null && instance.active == active)
{
workflowInstances.add(instance);
}

View File

@@ -0,0 +1,186 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow.jbpm;
import java.util.List;
import junit.framework.TestCase;
import org.jbpm.JbpmConfiguration;
import org.jbpm.JbpmContext;
import org.jbpm.db.GraphSession;
import org.jbpm.db.TaskMgmtSession;
import org.jbpm.graph.def.ProcessDefinition;
import org.jbpm.graph.exe.ProcessInstance;
import org.jbpm.graph.exe.Token;
import org.jbpm.taskmgmt.exe.TaskInstance;
/**
* Unit Test for reproducing constraint violation during JBPM process deletion
*
* http://jira.jboss.com/jira/browse/JBPM-757
*
* @author davidc
*/
public class JBPMDeleteProcessTest extends TestCase {
static JbpmConfiguration jbpmConfiguration = null;
static long processId = -1L;
static String currentTokenPath = null;
static {
jbpmConfiguration = JbpmConfiguration.parseXmlString(
"<jbpm-configuration>" +
" <jbpm-context>" +
" <service name='persistence' " +
" factory='org.jbpm.persistence.db.DbPersistenceServiceFactory' />" +
" </jbpm-context>" +
" <string name='resource.hibernate.cfg.xml' " +
" value='jbpmresources/hibernate.cfg.xml' />" +
" <string name='resource.business.calendar' " +
" value='org/jbpm/calendar/jbpm.business.calendar.properties' />" +
" <string name='resource.default.modules' " +
" value='org/jbpm/graph/def/jbpm.default.modules.properties' />" +
" <string name='resource.converter' " +
" value='org/jbpm/db/hibernate/jbpm.converter.properties' />" +
" <string name='resource.action.types' " +
" value='org/jbpm/graph/action/action.types.xml' />" +
" <string name='resource.node.types' " +
" value='org/jbpm/graph/node/node.types.xml' />" +
" <string name='resource.varmapping' " +
" value='org/jbpm/context/exe/jbpm.varmapping.xml' />" +
"</jbpm-configuration>"
);
}
public void setUp() {
jbpmConfiguration.createSchema();
}
public void tearDown() {
jbpmConfiguration.dropSchema();
}
public void testDelete() {
deployProcessDefinition();
startProcess();
step2TaskEnd();
deleteProcess();
}
public void deployProcessDefinition() {
ProcessDefinition processDefinition = ProcessDefinition.parseXmlString
(
"<process-definition name='deletetest'>" +
" <start-state name='start'> " +
" <task name='startTask'> " +
" <controller> " +
" <variable name='var1' access='write'/> " +
" </controller> " +
" </task> " +
" <transition name='' to='step2'/> " +
" </start-state> " +
" <task-node name='step2'> " +
" <task name='step2Task'/> " +
" <transition name='' to='step3'/> " +
" </task-node>" +
" <task-node name='step3'> " +
" <task name='step3Task'/> " +
" <transition name='' to='end'/> " +
" </task-node> " +
" <end-state name='end' />" +
"</process-definition>"
);
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
jbpmContext.deployProcessDefinition(processDefinition);
} finally {
jbpmContext.close();
}
}
public void startProcess() {
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessDefinition processDefinition = graphSession.findLatestProcessDefinition("deletetest");
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processId = processInstance.getId();
TaskInstance taskInstance = processInstance.getTaskMgmtInstance().createStartTaskInstance();
taskInstance.setVariableLocally("var1", "var1Value");
taskInstance.end();
Token token = taskInstance.getToken();
currentTokenPath = token.getFullName();
jbpmContext.save(processInstance);
} finally {
jbpmContext.close();
}
}
public void step2TaskEnd() {
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(processId);
Token token = processInstance.findToken(currentTokenPath);
TaskMgmtSession taskSession = jbpmContext.getTaskMgmtSession();
List tasks = taskSession.findTaskInstancesByToken(token.getId());
TaskInstance taskInstance = (TaskInstance)tasks.get(0);
//
// Uncomment the following line to force constraint violation
//
// taskInstance.setVariableLocally("var1", "var1TaskValue");
taskInstance.setVariableLocally("var2", "var2UpdatedValue");
taskInstance.end();
token = taskInstance.getToken();
currentTokenPath = token.getFullName();
jbpmContext.save(processInstance);
} finally {
jbpmContext.close();
}
}
public void deleteProcess()
{
JbpmContext jbpmContext = jbpmConfiguration.createJbpmContext();
try {
GraphSession graphSession = jbpmContext.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(processId);
graphSession.deleteProcessInstance(processInstance, true, true, true);
} finally {
jbpmContext.close();
}
}
}

View File

@@ -59,6 +59,7 @@ import org.alfresco.service.cmr.workflow.WorkflowTask;
import org.alfresco.service.cmr.workflow.WorkflowTaskDefinition;
import org.alfresco.service.cmr.workflow.WorkflowTaskState;
import org.alfresco.service.cmr.workflow.WorkflowTransition;
import org.alfresco.service.namespace.NamespaceException;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.hibernate.Query;
@@ -67,7 +68,6 @@ import org.hibernate.proxy.HibernateProxy;
import org.jbpm.JbpmContext;
import org.jbpm.JbpmException;
import org.jbpm.context.exe.ContextInstance;
import org.jbpm.context.exe.TokenVariableMap;
import org.jbpm.db.GraphSession;
import org.jbpm.db.TaskMgmtSession;
import org.jbpm.graph.def.Node;
@@ -260,8 +260,7 @@ public class JBPMEngine extends BPMEngine
{
// retrieve process definition
GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.loadProcessDefinition(getJbpmId(workflowDefinitionId));
// NOTE: if not found, should throw an exception
ProcessDefinition processDefinition = getProcessDefinition(graphSession, workflowDefinitionId);
// undeploy
// NOTE: jBPM deletes all "in-flight" processes too
@@ -332,7 +331,7 @@ public class JBPMEngine extends BPMEngine
{
GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDef = graphSession.findLatestProcessDefinition(createLocalId(workflowName));
return createWorkflowDefinition(processDef);
return processDef == null ? null : createWorkflowDefinition(processDef);
}
});
}
@@ -342,6 +341,23 @@ public class JBPMEngine extends BPMEngine
}
}
/**
* Gets a jBPM process definition
*
* @param graphSession jBPM graph session
* @param workflowDefinitionId workflow definition id
* @return process definition
*/
private ProcessDefinition getProcessDefinition(GraphSession graphSession, String workflowDefinitionId)
{
ProcessDefinition processDefinition = graphSession.getProcessDefinition(getJbpmId(workflowDefinitionId));
if (processDefinition == null)
{
throw new WorkflowException("Workflow definition '" + workflowDefinitionId + "' does not exist");
}
return processDefinition;
}
//
// Workflow Instance Management...
@@ -367,7 +383,7 @@ public class JBPMEngine extends BPMEngine
// construct a new process
GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.loadProcessDefinition(getJbpmId(workflowDefinitionId));
ProcessDefinition processDefinition = getProcessDefinition(graphSession, workflowDefinitionId);
ProcessInstance processInstance = new ProcessInstance(processDefinition);
// assign initial process context
@@ -399,7 +415,7 @@ public class JBPMEngine extends BPMEngine
throw new WorkflowException("Failed to start workflow " + workflowDefinitionId, e);
}
}
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowComponent#getActiveWorkflows(java.lang.String)
*/
@@ -447,7 +463,7 @@ public class JBPMEngine extends BPMEngine
// retrieve workflow
GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.getProcessInstance(getJbpmId(workflowId));
return createWorkflowInstance(processInstance);
return processInstance == null ? null : createWorkflowInstance(processInstance);
}
});
}
@@ -457,6 +473,22 @@ public class JBPMEngine extends BPMEngine
}
}
/**
* Gets a jBPM Process Instance
* @param graphSession jBPM graph session
* @param workflowId workflow id
* @return process instance
*/
private ProcessInstance getProcessInstance(GraphSession graphSession, String workflowId)
{
ProcessInstance processInstance = graphSession.getProcessInstance(getJbpmId(workflowId));
if (processInstance == null)
{
throw new WorkflowException("Workflow instance '" + workflowId + "' does not exist");
}
return processInstance;
}
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowComponent#getWorkflowPaths(java.lang.String)
*/
@@ -471,7 +503,7 @@ public class JBPMEngine extends BPMEngine
{
// retrieve process instance
GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(getJbpmId(workflowId));
ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
// convert jBPM tokens to workflow positions
List<Token> tokens = processInstance.findAllTokens();
@@ -509,37 +541,10 @@ public class JBPMEngine extends BPMEngine
{
// retrieve and cancel process instance
GraphSession graphSession = context.getGraphSession();
ProcessInstance processInstance = graphSession.loadProcessInstance(getJbpmId(workflowId));
ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
// TODO: Determine if this is the most appropriate way to cancel workflow...
// It might be useful to record point at which it was cancelled etc
WorkflowInstance workflowInstance = createWorkflowInstance(processInstance);
//
// TODO: remove - workaround for JBPM variable mapping constraint exception
//
Collection<TaskInstance> tasks = processInstance.getTaskMgmtInstance().getTaskInstances();
for (TaskInstance task : tasks)
{
Map<String, Serializable> taskVariables = task.getVariablesLocally();
for (String varName : taskVariables.keySet())
{
task.deleteVariableLocally(varName);
}
}
ContextInstance processContext = processInstance.getContextInstance();
Map<Token, TokenVariableMap> tokenVarMaps = processContext.getTokenVariableMaps();
for (Map.Entry<Token, TokenVariableMap> mapEntry : tokenVarMaps.entrySet())
{
TokenVariableMap tokenVarMap = mapEntry.getValue();
Map<String, Serializable> variables = tokenVarMap.getVariables();
for (String name : variables.keySet())
{
tokenVarMap.deleteVariable(name);
}
}
//
// end TODO
//
// delete the process instance
graphSession.deleteProcessInstance(processInstance, true, true, true);
@@ -738,6 +743,22 @@ public class JBPMEngine extends BPMEngine
}
}
/**
* Gets a jBPM Task Instance
* @param taskSession jBPM task session
* @param taskId task id
* @return task instance
*/
private TaskInstance getTaskInstance(TaskMgmtSession taskSession, String taskId)
{
TaskInstance taskInstance = taskSession.getTaskInstance(getJbpmId(taskId));
if (taskInstance == null)
{
throw new WorkflowException("Task instance '" + taskId + "' does not exist");
}
return taskInstance;
}
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.TaskComponent#updateTask(java.lang.String, java.util.Map, java.util.Map, java.util.Map)
*/
@@ -751,7 +772,7 @@ public class JBPMEngine extends BPMEngine
{
// retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId));
TaskInstance taskInstance = getTaskInstance(taskSession, taskId);
// create properties to set on task instance
Map<QName, Serializable> newProperties = properties;
@@ -871,13 +892,8 @@ public class JBPMEngine extends BPMEngine
{
// retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId));
TaskInstance taskInstance = getTaskInstance(taskSession, taskId);
// set status to complete
Map<QName, Serializable> taskProperties = new HashMap<QName, Serializable>();
taskProperties.put(WorkflowModel.PROP_STATUS, "Completed");
setTaskProperties(taskInstance, taskProperties);
// signal the transition on the task
if (transition == null)
{
@@ -922,8 +938,8 @@ public class JBPMEngine extends BPMEngine
{
// retrieve task
TaskMgmtSession taskSession = context.getTaskMgmtSession();
TaskInstance taskInstance = taskSession.loadTaskInstance(getJbpmId(taskId));
return createWorkflowTask(taskInstance);
TaskInstance taskInstance = taskSession.getTaskInstance(getJbpmId(taskId));
return taskInstance == null ? null : createWorkflowTask(taskInstance);
}
});
}
@@ -1134,7 +1150,7 @@ public class JBPMEngine extends BPMEngine
}
// retrieve jBPM token for workflow position
ProcessInstance processInstance = session.loadProcessInstance(getJbpmId(path[0]));
ProcessInstance processInstance = getProcessInstance(session, path[0]);
String tokenId = path[1].replace(WORKFLOW_TOKEN_SEPERATOR, "/");
Token token = processInstance.findToken(tokenId);
if (token == null)
@@ -1166,8 +1182,7 @@ public class JBPMEngine extends BPMEngine
for (Entry<String, Object> entry : vars.entrySet())
{
String key = entry.getKey();
String name = key.replace("_", ":");
QName qname = QName.createQName(name, this.namespaceService);
QName qname = mapNameToQName(key);
// add variable, only if part of task definition or locally defined on task
if (taskProperties.containsKey(qname) || taskAssocs.containsKey(qname) || instance.hasVariableLocally(key))
@@ -1356,8 +1371,7 @@ public class JBPMEngine extends BPMEngine
else if (key.equals(WorkflowModel.ASSOC_PACKAGE))
{
// Attach workflow definition & instance id to Workflow Package in Repository
String name = key.toPrefixString(this.namespaceService);
name = name.replace(':', '_');
String name = mapQNameToName(key);
JBPMNode existingWorkflowPackage = (JBPMNode)instance.getVariable(name);
// first check if provided workflow package has already been associated with another workflow instance
@@ -1401,8 +1415,7 @@ public class JBPMEngine extends BPMEngine
// no specific mapping to jBPM task has been established, so place into
// the generic task variable bag
String name = key.toPrefixString(this.namespaceService);
name = name.replace(':', '_');
String name = mapQNameToName(key);
instance.setVariableLocally(name, value);
}
}
@@ -1414,7 +1427,7 @@ public class JBPMEngine extends BPMEngine
*/
protected void setDefaultTaskProperties(TaskInstance instance)
{
Map<QName, Serializable> existingValues = null;
Map<QName, Serializable> existingValues = getTaskProperties(instance, true);
Map<QName, Serializable> defaultValues = new HashMap<QName, Serializable>();
// construct an anonymous type that flattens all mandatory aspects
@@ -1427,10 +1440,6 @@ public class JBPMEngine extends BPMEngine
String defaultValue = entry.getValue().getDefaultValue();
if (defaultValue != null)
{
if (existingValues == null)
{
existingValues = getTaskProperties(instance, true);
}
if (existingValues.get(entry.getKey()) == null)
{
defaultValues.put(entry.getKey(), defaultValue);
@@ -1442,7 +1451,7 @@ public class JBPMEngine extends BPMEngine
String description = (String)existingValues.get(WorkflowModel.PROP_DESCRIPTION);
if (description == null || description.length() == 0)
{
description = (String)instance.getContextInstance().getVariable("bpm_workflowDescription");
description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
if (description != null && description.length() > 0)
{
defaultValues.put(WorkflowModel.PROP_DESCRIPTION, description);
@ -1453,7 +1462,7 @@ public class JBPMEngine extends BPMEngine
defaultValues.put(WorkflowModel.PROP_DESCRIPTION, task.title);
}
}
// assign the default values to the task
if (defaultValues.size() > 0)
{
@@ -1462,16 +1471,62 @@ public class JBPMEngine extends BPMEngine
}
/**
* Set Task Outcome based on specified Transition
* Sets default description for the Task
*
* @param instance task instance
* @param transition transition
*/
protected void setTaskOutcome(TaskInstance instance, Transition transition)
public void setDefaultStartTaskDescription(TaskInstance instance)
{
Map<QName, Serializable> outcome = new HashMap<QName, Serializable>();
outcome.put(WorkflowModel.PROP_OUTCOME, transition.getName());
setTaskProperties(instance, outcome);
String description = instance.getTask().getDescription();
if (description == null || description.length() == 0)
{
description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
if (description != null && description.length() > 0)
{
Map<QName, Serializable> defaultValues = new HashMap<QName, Serializable>();
defaultValues.put(WorkflowModel.PROP_DESCRIPTION, description);
setTaskProperties(instance, defaultValues);
}
}
}
/**
* Initialise Workflow Instance properties
*
* @param startTask start task instance
*/
protected void setDefaultWorkflowProperties(TaskInstance startTask)
{
Map<QName, Serializable> taskProperties = getTaskProperties(startTask, true);
ContextInstance processContext = startTask.getContextInstance();
String workflowDescriptionName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION);
if (!processContext.hasVariable(workflowDescriptionName))
{
processContext.setVariable(workflowDescriptionName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
}
String workflowDueDateName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_DUE_DATE);
if (!processContext.hasVariable(workflowDueDateName))
{
processContext.setVariable(workflowDueDateName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_DUE_DATE));
}
String workflowPriorityName = mapQNameToName(WorkflowModel.PROP_WORKFLOW_PRIORITY);
if (!processContext.hasVariable(workflowPriorityName))
{
processContext.setVariable(workflowPriorityName, taskProperties.get(WorkflowModel.PROP_WORKFLOW_PRIORITY));
}
String workflowPackageName = mapQNameToName(WorkflowModel.ASSOC_PACKAGE);
if (!processContext.hasVariable(workflowPackageName))
{
Serializable packageNodeRef = taskProperties.get(WorkflowModel.ASSOC_PACKAGE);
processContext.setVariable(workflowPackageName, convertNodeRefs(packageNodeRef instanceof List, packageNodeRef));
}
String workflowContextName = mapQNameToName(WorkflowModel.PROP_CONTEXT);
if (!processContext.hasVariable(workflowContextName))
{
Serializable contextRef = taskProperties.get(WorkflowModel.PROP_CONTEXT);
processContext.setVariable(workflowContextName, convertNodeRefs(contextRef instanceof List, contextRef));
}
}
/**
@@ -1538,6 +1593,39 @@ public class JBPMEngine extends BPMEngine
return authority;
}
/**
* Map jBPM variable name to QName
*
* @param name jBPM variable name
* @return qname
*/
private QName mapNameToQName(String name)
{
QName qname = null;
String qnameStr = name.replaceFirst("_", ":");
try
{
qname = QName.createQName(qnameStr, this.namespaceService);
}
catch(NamespaceException e)
{
qname = QName.createQName(name, this.namespaceService);
}
return qname;
}
/**
* Map QName to jBPM variable name
*
* @param name QName
* @return jBPM variable name
*/
private String mapQNameToName(QName name)
{
String nameStr = name.toPrefixString(this.namespaceService);
return nameStr.replace(':', '_');
}
/**
* Get an I18N Label for a workflow item
*
@@ -1651,6 +1739,7 @@ public class JBPMEngine extends BPMEngine
{
WorkflowInstance workflowInstance = new WorkflowInstance();
workflowInstance.id = createGlobalId(new Long(instance.getId()).toString());
workflowInstance.description = (String)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_WORKFLOW_DESCRIPTION));
workflowInstance.definition = createWorkflowDefinition(instance.getProcessDefinition());
workflowInstance.active = !instance.hasEnded();
JBPMNode initiator = (JBPMNode)instance.getContextInstance().getVariable("initiator");
@@ -1658,6 +1747,16 @@ public class JBPMEngine extends BPMEngine
{
workflowInstance.initiator = initiator.getNodeRef();
}
JBPMNode context = (JBPMNode)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.PROP_CONTEXT));
if (context != null)
{
workflowInstance.context = context.getNodeRef();
}
JBPMNode workflowPackage = (JBPMNode)instance.getContextInstance().getVariable(mapQNameToName(WorkflowModel.ASSOC_PACKAGE));
if (workflowPackage != null)
{
workflowInstance.workflowPackage = workflowPackage.getNodeRef();
}
workflowInstance.startDate = instance.getStart();
workflowInstance.endDate = instance.getEnd();
return workflowInstance;

View File

@@ -100,6 +100,7 @@ public class ReviewAndApproveTest extends BaseSpringTest
params.put(WorkflowModel.PROP_WORKFLOW_DUE_DATE, reviewDueDate);
NodeRef reviewer = personService.getPerson("admin");
params.put(WorkflowModel.ASSOC_ASSIGNEE, reviewer);
params.put(WorkflowModel.PROP_WORKFLOW_DESCRIPTION, "Test review");
WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, params);
assertNotNull(path);
@@ -113,6 +114,8 @@ public class ReviewAndApproveTest extends BaseSpringTest
assertNotNull(endedTask);
assertTrue(endedTask.properties.containsKey(WorkflowModel.PROP_OUTCOME));
assertEquals("", endedTask.properties.get(WorkflowModel.PROP_OUTCOME));
assertEquals("Test review", endedTask.properties.get(WorkflowModel.PROP_DESCRIPTION));
assertEquals("Test review", endedTask.path.instance.description);
List<WorkflowTask> assignedTasks = taskComponent.getAssignedTasks("admin", WorkflowTaskState.IN_PROGRESS);
assertNotNull(assignedTasks);

View File

@@ -16,7 +16,13 @@
*/
package org.alfresco.repo.workflow.jbpm;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.repo.workflow.WorkflowModel;
import org.alfresco.service.cmr.workflow.WorkflowException;
import org.alfresco.service.namespace.QName;
import org.jbpm.graph.def.Transition;
import org.jbpm.graph.exe.ExecutionContext;
import org.jbpm.taskmgmt.exe.TaskInstance;
@@ -96,14 +102,30 @@ public class WorkflowTaskInstance extends TaskInstance
@Override
public void end(Transition transition)
{
// NOTE: Set the outcome first, so it's available during the submission of
// Set task properties on completion of task
// NOTE: Set properties first, so they're available during the submission of
// task variables to the process context
Map<QName, Serializable> taskProperties = new HashMap<QName, Serializable>();
Transition outcome = (transition == null) ? token.getNode().getDefaultLeavingTransition() : transition;
if (outcome != null)
{
getJBPMEngine().setTaskOutcome(this, outcome);
taskProperties.put(WorkflowModel.PROP_OUTCOME, outcome.getName());
}
taskProperties.put(WorkflowModel.PROP_STATUS, "Completed");
getJBPMEngine().setTaskProperties(this, taskProperties);
// perform transition
super.end(transition);
if (getTask().getStartState() != null)
{
// if ending a start task, push start task properties to process context, if not
// already done
getJBPMEngine().setDefaultWorkflowProperties(this);
// set task description
getJBPMEngine().setDefaultStartTaskDescription(this);
}
}
}

View File

@@ -57,7 +57,7 @@
<event type="node-enter">
<script>
System.out.println("javascript: " + alfrescoScriptResult);
System.out.println("bpm_workflowDescription: " + bpm_description);
System.out.println("bpm_workflowDescription: " + bpm_workflowDescription);
</script>
</event>
</end-state>

View File

@@ -37,6 +37,7 @@ import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.OwnableService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.version.VersionService;
import org.alfresco.service.cmr.view.ExporterService;
import org.alfresco.service.cmr.view.ImporterService;
@@ -88,6 +89,7 @@ public interface ServiceRegistry
static final QName WORKFLOW_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "WorkflowService");
static final QName AUDIT_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "AuditService");
static final QName OWNABLE_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "OwnableService");
static final QName PERSON_SERVICE = QName.createQName(NamespaceService.ALFRESCO_URI, "PersonService");
/**
* Get the list of services provided by the Repository
@@ -280,4 +282,11 @@
*/
@NotAuditable
OwnableService getOwnableService();
/**
* Get the person service (or null if one is not provided)
* @return the person service
*/
@NotAuditable
PersonService getPersonService();
}

View File

@@ -36,6 +36,15 @@ public final class NodeRef implements EntityRef, Serializable
private final StoreRef storeRef;
private final String id;
/**
* @see #NodeRef(StoreRef, String)
* @see StoreRef#StoreRef(String, String)
*/
public NodeRef(String protocol, String identifier, String id)
{
this(new StoreRef(protocol, identifier), id);
}
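A quick illustration of the shortcut (store and node id values are made up):

// The following two references are equivalent:
NodeRef byParts = new NodeRef("workspace", "SpacesStore", "1234");
NodeRef byStore = new NodeRef(new StoreRef("workspace", "SpacesStore"), "1234");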
/**
* Construct a Node Reference from a Store Reference and Node Id
*

View File

@@ -33,17 +33,26 @@ public class WorkflowInstance
/** Workflow Instance unique id */
public String id;
/** Workflow Instance description */
public String description;
/** Is this Workflow instance still "in-flight" or has it completed? */
public boolean active;
/** Initiator (cm:person) - null if System initiated */
public NodeRef initiator;
/** Workflow Start Date */
public Date startDate;
/** Workflow End Date */
public Date endDate;
/** Workflow Package */
public NodeRef workflowPackage;
/** Workflow Context */
public NodeRef context;
/** Workflow Definition */
public WorkflowDefinition definition;

View File

@@ -100,7 +100,7 @@ public interface WorkflowService
* Gets a Workflow Definition by unique Id
*
* @param workflowDefinitionId the workflow definition id
* @return the deployed workflow definition
* @return the deployed workflow definition (or null if not found)
*/
@Auditable(parameters = {"workflowDefinitionId"})
public WorkflowDefinition getDefinitionById(String workflowDefinitionId);
@@ -109,7 +109,7 @@ public interface WorkflowService
* Gets a Workflow Definition by unique name
*
* @param workflowName workflow name e.g. jbpm://review
* @return the deployed workflow definition
* @return the deployed workflow definition (or null if not found)
*/
@Auditable(parameters = {"workflowName"})
public WorkflowDefinition getDefinitionByName(String workflowName);
@@ -153,7 +153,7 @@ public interface WorkflowService
* Gets a specific workflow instance
*
* @param workflowId the id of the workflow to retrieve
* @return the workflow instance
* @return the workflow instance (or null if not found)
*/
@Auditable(parameters = {"workflowId"})
public WorkflowInstance getWorkflowById(String workflowId);
@@ -204,7 +204,7 @@ public interface WorkflowService
* Gets a Task by unique Id
*
* @param taskId the task id
* @return the task
* @return the task (or null if not found)
*/
@Auditable(parameters = {"taskId"})
public WorkflowTask getTaskById(String taskId);
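Since these lookups now document a null result rather than an exception, callers should guard accordingly; a minimal sketch ('workflowService' and 'taskId' assumed in scope):

WorkflowTask task = workflowService.getTaskById(taskId);
if (task == null)
{
    // not found: handle the missing task instead of relying on an exception
}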

View File

@@ -0,0 +1,48 @@
<ehcache>
<!-- Sets the path to the directory where cache .data files are created.
If the path is a Java System Property it is replaced by
its value in the running VM.
The following properties are translated:
user.home - User's home directory
user.dir - User's current working directory
java.io.tmpdir - Default temp file path
<diskStore path="java.io.tmpdir"/> -->
<!--Default Cache configuration. These will be applied to caches programmatically created through
the CacheManager.
The following attributes are required:
maxElementsInMemory - Sets the maximum number of objects that will be created in memory
eternal - Sets whether elements are eternal. If eternal, timeouts are ignored and the
element is never expired.
overflowToDisk - Sets whether elements can overflow to disk when the in-memory cache
has reached the maxInMemory limit.
The following attributes are optional:
timeToIdleSeconds - Sets the time to idle for an element before it expires.
i.e. The maximum amount of time between accesses before an element expires
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can idle for infinity.
The default value is 0.
timeToLiveSeconds - Sets the time to live for an element before it expires.
i.e. The maximum time between creation time and when an element expires.
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can live for infinity.
The default value is 0.
diskPersistent - Whether the disk store persists between restarts of the Virtual Machine.
The default value is false.
diskExpiryThreadIntervalSeconds - The number of seconds between runs of the disk expiry thread. The default value
is 120 seconds.
-->
<defaultCache
maxElementsInMemory="100000"
eternal="true"
overflowToDisk="false"
diskPersistent="false"
/>
</ehcache>

View File

@@ -0,0 +1,166 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<!-- jdbc connection properties -->
<property name="hibernate.dialect">org.hibernate.dialect.HSQLDialect</property>
<property name="hibernate.connection.driver_class">org.hsqldb.jdbcDriver</property>
<property name="hibernate.connection.url">jdbc:hsqldb:mem:.;sql.enforce_strict_size=true</property>
<property name="hibernate.connection.username">sa</property>
<property name="hibernate.connection.password"></property>
<!-- other hibernate properties
<property name="hibernate.show_sql">true</property>
<property name="hibernate.format_sql">true</property>
<property name="hibernate.use_sql_comments">true</property>
-->
<!-- ############################################ -->
<!-- # mapping files with external dependencies # -->
<!-- ############################################ -->
<!-- the following mapping file has a dependency on -->
<!-- 'bsh-{version}.jar'. -->
<!-- comment this out if you don't have bsh on your -->
<!-- classpath; you won't be able to use the -->
<!-- script element in process definition files -->
<mapping resource="org/jbpm/graph/action/Script.hbm.xml"/>
<!-- the following mapping files have a dependency on -->
<!-- 'jbpm-identity-{version}.jar', mapping files -->
<!-- of the pluggable jbpm identity component. -->
<!-- comment out the following 3 lines if you don't -->
<!-- want to use the default jBPM identity mgmt -->
<!-- component -->
<mapping resource="org/jbpm/identity/User.hbm.xml"/>
<mapping resource="org/jbpm/identity/Group.hbm.xml"/>
<mapping resource="org/jbpm/identity/Membership.hbm.xml"/>
<!-- ###################### -->
<!-- # jbpm mapping files # -->
<!-- ###################### -->
<!-- hql queries and type defs -->
<mapping resource="org/jbpm/db/hibernate.queries.hbm.xml" />
<!-- graph.def mapping files -->
<mapping resource="org/jbpm/graph/def/ProcessDefinition.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Node.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Transition.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Event.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/Action.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/SuperState.hbm.xml"/>
<mapping resource="org/jbpm/graph/def/ExceptionHandler.hbm.xml"/>
<mapping resource="org/jbpm/instantiation/Delegation.hbm.xml"/>
<!-- graph.node mapping files -->
<mapping resource="org/jbpm/graph/node/StartState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/EndState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/ProcessState.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Decision.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Fork.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/Join.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/State.hbm.xml"/>
<mapping resource="org/jbpm/graph/node/TaskNode.hbm.xml"/>
<!-- context.def mapping files -->
<mapping resource="org/jbpm/context/def/ContextDefinition.hbm.xml"/>
<mapping resource="org/jbpm/context/def/VariableAccess.hbm.xml"/>
<!-- taskmgmt.def mapping files -->
<mapping resource="org/jbpm/taskmgmt/def/TaskMgmtDefinition.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/Swimlane.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/Task.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/def/TaskController.hbm.xml"/>
<!-- module.def mapping files -->
<mapping resource="org/jbpm/module/def/ModuleDefinition.hbm.xml"/>
<!-- bytes mapping files -->
<mapping resource="org/jbpm/bytes/ByteArray.hbm.xml"/>
<!-- file.def mapping files -->
<mapping resource="org/jbpm/file/def/FileDefinition.hbm.xml"/>
<!-- scheduler.def mapping files -->
<mapping resource="org/jbpm/scheduler/def/CreateTimerAction.hbm.xml"/>
<mapping resource="org/jbpm/scheduler/def/CancelTimerAction.hbm.xml"/>
<!-- graph.exe mapping files -->
<mapping resource="org/jbpm/graph/exe/Comment.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/ProcessInstance.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/Token.hbm.xml"/>
<mapping resource="org/jbpm/graph/exe/RuntimeAction.hbm.xml"/>
<!-- module.exe mapping files -->
<mapping resource="org/jbpm/module/exe/ModuleInstance.hbm.xml"/>
<!-- context.exe mapping files -->
<mapping resource="org/jbpm/context/exe/ContextInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/TokenVariableMap.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/VariableInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/ByteArrayInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/DateInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/DoubleInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/HibernateLongInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/HibernateStringInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/LongInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/NullInstance.hbm.xml"/>
<mapping resource="org/jbpm/context/exe/variableinstance/StringInstance.hbm.xml"/>
<!-- msg.db mapping files -->
<mapping resource="org/jbpm/msg/Message.hbm.xml"/>
<mapping resource="org/jbpm/msg/db/TextMessage.hbm.xml"/>
<mapping resource="org/jbpm/command/ExecuteActionCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/ExecuteNodeCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/SignalCommand.hbm.xml"/>
<mapping resource="org/jbpm/command/TaskInstanceEndCommand.hbm.xml"/>
<!-- taskmgmt.exe mapping files -->
<mapping resource="org/jbpm/taskmgmt/exe/TaskMgmtInstance.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/TaskInstance.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/PooledActor.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/exe/SwimlaneInstance.hbm.xml"/>
<!-- scheduler.exe mapping files -->
<mapping resource="org/jbpm/scheduler/exe/Timer.hbm.xml"/>
<!-- logging mapping files -->
<mapping resource="org/jbpm/logging/log/ProcessLog.hbm.xml"/>
<mapping resource="org/jbpm/logging/log/MessageLog.hbm.xml"/>
<mapping resource="org/jbpm/logging/log/CompositeLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ActionLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/NodeLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessInstanceCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessInstanceEndLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/ProcessStateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/SignalLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TokenCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TokenEndLog.hbm.xml"/>
<mapping resource="org/jbpm/graph/log/TransitionLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableDeleteLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/VariableUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/ByteArrayUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/DateUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/DoubleUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/HibernateLongUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/HibernateStringUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/LongUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/context/log/variableinstance/StringUpdateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskAssignLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/TaskEndLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneCreateLog.hbm.xml"/>
<mapping resource="org/jbpm/taskmgmt/log/SwimlaneAssignLog.hbm.xml"/>
</session-factory>
</hibernate-configuration>