Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-08-07 17:49:17 +00:00)
Merged HEAD-QA to HEAD (4.2) - final one
r54310-54386 git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@54387 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -54,7 +54,7 @@

<!-- The cross-transaction shared cache for Property entities -->

<bean name="propertyValueSharedCache" factory-bean="cacheFactory" factory-method="createLocalCache">
<bean name="propertyValueSharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.propertyValueCache"/>
</bean>

@@ -64,7 +64,7 @@

<!-- The cross-transaction shared cache for Property Unique Contexts -->

<bean name="propertyUniqueContextSharedCache" factory-bean="cacheFactory" factory-method="createInvalidateRemovalCache">
<bean name="propertyUniqueContextSharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.propertyUniqueContextSharedCache"/>
</bean>
@@ -1,26 +1,26 @@
email.server.msg.received_by_smtp=Mottatt via SMTP fra "{0}".
email.server.msg.received_by_smtp=Mottatt via SMTP fra ''{0}''.
email.server.msg.default_subject=E-post-{0}

email.server.err.duplicate_alias=Node med e-post-alias ''{0}'' eksisterer allerede. Duplisering er ikke tillatt.
email.server.err.sender_blocked="{0}" ble nektet tilgang.
email.server.err.sender_blocked=''{0}'' ble nektet tilgang.
email.server.err.inbound_mail_disabled=Alfresco-serveren er ikke konfigurert til \u00e5 godta inng\u00e5ende e-post.
email.server.err.access_denied="{0}" ble nektet tilgang til "{1}".
email.server.err.access_denied=''{0}'' ble nektet tilgang til ''{1}''.
email.server.err.invalid_subject=Emnelinjen m\u00e5 v\u00e6re et gyldig filnavn.
email.server.err.unknown_source_address='Fra'-e-postadressen ble ikke gjenkjent: {0}.
email.server.err.user_not_email_contributor=Brukeren "{0}" er ikke i e-postbidragsytergruppen.
email.server.err.unknown_source_address=''Fra''-e-postadressen ble ikke gjenkjent: {0}.
email.server.err.user_not_email_contributor=Brukeren ''{0}'' er ikke i e-postbidragsytergruppen.
email.server.err.no_email_contributor_group=E-postbidgragsytergruppen finnes ikke.
email.server.err.invalid_node_address=E-postadressen "{0}" henviser ikke til en gyldig, tilgjengelig node.
email.server.err.handler_not_found=Fant ikke e-postmeldingsbehandleren for nodetype "{0}".
email.server.err.invalid_node_address=E-postadressen ''{0}'' henviser ikke til en gyldig, tilgjengelig node.
email.server.err.handler_not_found=Fant ikke e-postmeldingsbehandleren for nodetype ''{0}''.
email.server.err.mail_read_error=Det oppsto en feil under lesing av e-postmeldingen: {0}
email.server.err.failed_to_create_mime_message=Kunne ikke opprette MIME-melding fra inndataflyt: {0}
email.server.err.extracting_from_address=Kunne ikke pakke ut 'fra'-adressen: {0}
email.server.err.no_from_address=Meldingen har ingen "fra"-adresse.
email.server.err.extracting_to_address=Kunne ikke pakke ut 'til'-adressen: {0}
email.server.err.no_to_address=Meldingen har ingen "til"-adresse.
email.server.err.extracting_from_address=Kunne ikke pakke ut ''fra''-adressen: {0}
email.server.err.no_from_address=Meldingen har ingen ''fra''-adresse.
email.server.err.extracting_to_address=Kunne ikke pakke ut ''til''-adressen: {0}
email.server.err.no_to_address=Meldingen har ingen ''til''-adresse.
email.server.err.extracting_subject=Kunne ikke pakke ut meldingsemnet: {0}
email.server.err.extracting_sent_date=Kunne ikke pakke ut 'sendt den'-datoen: {0}
email.server.err.extracting_sent_date=Kunne ikke pakke ut ''sendt den''-datoen: {0}
email.server.err.parse_message=Kunne ikke analysere e-postmeldingen: {0}
email.server.err.usupported_encoding=Koding "{0}" st\u00f8ttes ikke
email.server.err.usupported_encoding=Koding ''{0}'' st\u00f8ttes ikke
email.server.err.failed_to_read_content_stream=Kunne ikke lese meldingsdelinnholdet: {0}
email.server.err.incorrect_message_part=Feil meldingsdel: {0}
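These bundles are read through java.text.MessageFormat, where a single quote is the escape character: a literal apostrophe has to be written as '' or the rest of the quoted section is treated as literal text, which appears to be why the quoting above moves from "{0}" to ''{0}''. A minimal, self-contained sketch of that behaviour (the sample address is invented for illustration):

import java.text.MessageFormat;

public class MessageFormatQuotingDemo
{
    public static void main(String[] args)
    {
        // Doubled single quotes render as one literal quote and keep {0} active.
        // Prints: 'someone@example.com' ble nektet tilgang.
        System.out.println(MessageFormat.format("''{0}'' ble nektet tilgang.", "someone@example.com"));

        // A single unescaped quote switches MessageFormat into literal mode, so the
        // placeholder between the quotes is not substituted.
        // Prints: {0} ble nektet tilgang.
        System.out.println(MessageFormat.format("'{0}' ble nektet tilgang.", "someone@example.com"));
    }
}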
@@ -483,15 +483,26 @@ patch.swsdpPatch.missingSurfConfig=surf-config folder is not present in Sample:
patch.redeployParallelActivitiWorkflows.description=Patch that redeploys both parallel activiti workflows, completion-condition now takes into account if minimum approval percentage can still be achived.

patch.show.audit.success=show_audit.ftl was updated successfully
patch.show.audit.description=Patch to update references to show_audit.ftl after upgrade from 3.3.5 version to 3.4.* version

patch.increaseColumnSizeActiviti.description=ALF-14983 : Upgrade scripts to increase column sizes for Activiti
patch.removeColumnActiviti.description=ALF-16038 : DB2: Upgrade script to remove ALFUSER.ACT_HI_ACTINST.OWNER_

patch.renameConstraintActiviti.description=ALF-15828 : DB2: Upgrade script to rename ACT_HI_PROCINST.PROC_INST_ID_ index
patch.db-V4.1-drop-fk-indexes.description=Manually executed script upgrade V4.1: ALF-16286: DROP fk_alf_qname_ns on alf_qname table
patch.db-V4.1-drop-activiti-feed-format.description=ALF-17493 : Remove alf_activity_feed.activity_format

patch.calendarAllDayEventDatesCorrectingPatch.description=This patch corrects 'to' and 'from' dates for Calendar 'All Day' Events from version 3.4 which did not take account of time zone offsets
patch.calendarAllDayEventDatesCorrectingPatch.result=Successfully updated {0} Calendar 'All Day' Event(s)

patch.upgradeToActiviti5-10.description=Upgraded Activiti tables to 5.10 version
patch.addActivtiIndexHistoricActivity.description=Additional index for activiti on historic activity (PROC_INST_ID_ and ACTIVITY_ID_)
patch.upgradeToActiviti5-11.description=Upgraded Activiti tables to 5.11 version
patch.redeployActivitiWorkflowsForCategory.description=Redeploy internal process definitions for category update

patch.db-V4.2-remove-index-acl_id.description=ALF-12284 : Update ALF_ACL_MEMBER_member table. Remove index acl_id.

patch.sharedFolder=Add shared folder
patch.sharedFolder.description=Add Shared Folder
patch.sharedFolder.success=Successfully added shared folder
patch.sharedFolder.result.renamed=Renamed Shared Folder to: {0}

patch.db-V4.2-metadata-query-indexes.description=Add additional indexes to support transactional metadata query direct to the database.
patch.upgradeToActiviti5-13.description=Upgraded Activiti tables to 5.13 version
@@ -488,6 +488,10 @@ patch.increaseColumnSizeActiviti.description=ALF-14983 : Upgrade scripts to incr
patch.removeColumnActiviti.description=ALF-16038 : DB2: Upgrade script to remove ALFUSER.ACT_HI_ACTINST.OWNER_

patch.renameConstraintActiviti.description=ALF-15828 : DB2: Upgrade script to rename ACT_HI_PROCINST.PROC_INST_ID_ index

patch.calendarAllDayEventDatesCorrectingPatch.description=This patch corrects 'to' and 'from' dates for Calendar 'All Day' Events from version 3.4 which did not take account of time zone offsets
patch.calendarAllDayEventDatesCorrectingPatch.result=Successfully updated {0} Calendar 'All Day' Event(s)

patch.upgradeToActiviti5-10.description=Upgraded Activiti tables to 5.10 version
patch.addActivtiIndexHistoricActivity.description=Additional index for activiti on historic activity (PROC_INST_ID_ and ACTIVITY_ID_)
patch.upgradeToActiviti5-11.description=Upgraded Activiti tables to 5.11 version
@@ -39,6 +39,8 @@ system.cluster.license.not_enabled=License does not permit clustering: clusterin
system.cluster.disabled=Clustering is disabled: not starting.
system.cluster.checker.notavailable=Unable to run cluster check tool as clustering is disabled.
system.cluster.started=Cluster started, name: {0}
system.cluster.member_joined=Member joined: {0}
system.cluster.member_left=Member left: {0}
system.cluster.curr_members=Current cluster members:
system.cluster.member={0} (hostname: {1})
system.cluster.shutdown=Clustering has shutdown.
@@ -58,7 +58,22 @@

<bean id="permissionServiceImpl" class="org.alfresco.repo.security.permissions.impl.PermissionServiceImpl" init-method="init">
<property name="nodeService">
<ref bean="mtAwareNodeService" />
<!-- ALF-19732: added lockableAspectInterceptor -->
<bean class="org.springframework.aop.framework.ProxyFactoryBean" >
<property name="targetName">
<value>mtAwareNodeService</value>
</property>
<property name="proxyInterfaces">
<list>
<value>org.alfresco.service.cmr.repository.NodeService</value>
</list>
</property>
<property name="interceptorNames">
<list>
<value>lockableAspectInterceptor</value>
</list>
</property>
</bean>
</property>
<property name="tenantService">
<ref bean="tenantService"/>
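The ProxyFactoryBean above wraps mtAwareNodeService so that NodeService calls made by the permission service pass through lockableAspectInterceptor first. Entries listed under interceptorNames must resolve to Spring AOP advice, most commonly an AOP Alliance MethodInterceptor. The sketch below only illustrates that contract; it is not the actual Alfresco lockableAspectInterceptor, whose lock-aware behaviour lives elsewhere in the repository.

import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;

/**
 * Illustrative only: a pass-through interceptor in the style of lockableAspectInterceptor.
 * It shows the shape a bean must have to appear in a ProxyFactoryBean's interceptorNames list.
 */
public class ExampleLockAwareInterceptor implements MethodInterceptor
{
    @Override
    public Object invoke(MethodInvocation invocation) throws Throwable
    {
        // Pre-processing (for example, consulting lock state) would happen here.
        Object result = invocation.proceed();
        // Post-processing of the NodeService result would happen here.
        return result;
    }
}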
@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=Richiedi l'a
activitiParallelReview.property.wf_requiredApprovePercent.title=Percentuale di approvazioni richiesta
activitiParallelReview.property.wf_requiredApprovePercent.description=Percentuale di esaminatori che devono approvare il contenuto per l'approvazione
activitiParallelReview.task.wf_activitiReviewTask.title=Esamina
activitiParallelReview.task.wf_activitiReviewTask.description=Esamina i documenti per approvarli o respingerli
activitiParallelReview.task.wf_rejectedParallelTask.title=Documento respinto
activitiParallelReview.task.wf_rejectedParallelTask.description=I documenti sono stati respinti
activitiParallelReview.task.wf_approvedParallelTask.title=Documento approvato
activitiParallelReview.task.wf_approvedParallelTask.description=I documenti sono stati approvati
activitiParallelReview.property.wf_reviewerCount.title=Numero di esaminatori
activitiParallelReview.property.wf_reviewerCount.description=Numero di esaminatori
activitiParallelReview.property.wf_requiredPercent.title=Percentuale di approvazioni richiesta

@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=1\u540d\u4ee
activitiParallelReview.property.wf_requiredApprovePercent.title=\u8981\u6c42\u3055\u308c\u305f\u627f\u8a8d\u30d1\u30fc\u30bb\u30f3\u30c6\u30fc\u30b8
activitiParallelReview.property.wf_requiredApprovePercent.description=\u627f\u8a8d\u3092\u53d7\u3051\u308b\u305f\u3081\u306b\u627f\u8a8d\u3059\u308b\u5fc5\u8981\u306e\u3042\u308b\u30ec\u30d3\u30e5\u30a2\u306e\u30d1\u30fc\u30bb\u30f3\u30c6\u30fc\u30b8
activitiParallelReview.task.wf_activitiReviewTask.title=\u30ec\u30d3\u30e5\u30fc
activitiParallelReview.task.wf_activitiReviewTask.description=\u6587\u66f8\u3092\u30ec\u30d3\u30e5\u30fc\u3057\u305d\u308c\u3092\u627f\u8a8d\u307e\u305f\u306f\u5374\u4e0b\u3059\u308b
activitiParallelReview.task.wf_rejectedParallelTask.title=\u6587\u66f8\u5374\u4e0b
activitiParallelReview.task.wf_rejectedParallelTask.description=\u6587\u66f8\u306f\u5374\u4e0b\u3055\u308c\u307e\u3057\u305f
activitiParallelReview.task.wf_approvedParallelTask.title=\u6587\u66f8\u627f\u8a8d
activitiParallelReview.task.wf_approvedParallelTask.description=\u6587\u66f8\u306f\u627f\u8a8d\u3055\u308c\u307e\u3057\u305f
activitiParallelReview.property.wf_reviewerCount.title=\u30ec\u30d3\u30e5\u30a2\u306e\u6570
activitiParallelReview.property.wf_reviewerCount.description=\u30ec\u30d3\u30e5\u30a2\u306e\u6570
activitiParallelReview.property.wf_requiredPercent.title=\u8981\u6c42\u3055\u308c\u305f\u627f\u8a8d\u30d1\u30fc\u30bb\u30f3\u30c6\u30fc\u30b8

@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=Be om dokume
activitiParallelReview.property.wf_requiredApprovePercent.title=Obligatorisk godkjenningsprosent
activitiParallelReview.property.wf_requiredApprovePercent.description=Prosent av korrekturlesere som m\u00e5 godkjenne for godkjennelse
activitiParallelReview.task.wf_activitiReviewTask.title=Gjennomg\u00e5
activitiParallelReview.task.wf_activitiReviewTask.description=Gjennomg\u00e5 dokumenter for \u00e5 godkjenne eller avvise dem
activitiParallelReview.task.wf_rejectedParallelTask.title=Dokument avvist
activitiParallelReview.task.wf_rejectedParallelTask.description=Dokument(er) ble avvist
activitiParallelReview.task.wf_approvedParallelTask.title=Dokument godkjent
activitiParallelReview.task.wf_approvedParallelTask.description=Dokument(er) ble godkjent
activitiParallelReview.property.wf_reviewerCount.title=Antall korrekturlesere
activitiParallelReview.property.wf_reviewerCount.description=Antall korrekturlesere
activitiParallelReview.property.wf_requiredPercent.title=Obligatorisk godkjenningsprosent

@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=Een of meer
activitiParallelReview.property.wf_requiredApprovePercent.title=Vereist goedkeuringspercentage
activitiParallelReview.property.wf_requiredApprovePercent.description=Percentage van revisoren die hun goedkeuring moeten geven
activitiParallelReview.task.wf_activitiReviewTask.title=Reviseren
activitiParallelReview.task.wf_activitiReviewTask.description=Documenten reviseren om ze goed te keuren of af te wijzen
activitiParallelReview.task.wf_rejectedParallelTask.title=Document afgewezen
activitiParallelReview.task.wf_rejectedParallelTask.description=Een of meer documenten zijn afgewezen
activitiParallelReview.task.wf_approvedParallelTask.title=Document goedgekeurd
activitiParallelReview.task.wf_approvedParallelTask.description=Een of meer documenten zijn goedgekeurd
activitiParallelReview.property.wf_reviewerCount.title=Aantal revisoren
activitiParallelReview.property.wf_reviewerCount.description=Aantal revisoren
activitiParallelReview.property.wf_requiredPercent.title=Vereist goedkeuringspercentage

@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=\u0417\u0430
activitiParallelReview.property.wf_requiredApprovePercent.title=\u0422\u0440\u0435\u0431\u0443\u0435\u043c\u044b\u0439 \u043f\u0440\u043e\u0446\u0435\u043d\u0442 \u0443\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d\u0438\u044f
activitiParallelReview.property.wf_requiredApprovePercent.description=\u041f\u0440\u043e\u0446\u0435\u043d\u0442 \u0440\u0435\u0446\u0435\u043d\u0437\u0435\u043d\u0442\u043e\u0432, \u043a\u043e\u0442\u043e\u0440\u044b\u0435 \u0434\u043e\u043b\u0436\u043d\u044b \u0443\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u044c \u0434\u043e\u043a\u0443\u043c\u0435\u043d\u0442 \u0434\u043b\u044f \u0435\u0433\u043e \u043f\u0440\u0438\u043d\u044f\u0442\u0438\u044f
activitiParallelReview.task.wf_activitiReviewTask.title=\u041f\u0440\u043e\u0432\u0435\u0440\u0438\u0442\u044c
activitiParallelReview.task.wf_activitiReviewTask.description=\u041f\u0440\u043e\u0432\u0435\u0440\u0438\u0442\u044c \u0434\u043e\u043a\u0443\u043c\u0435\u043d\u0442\u044b \u0441 \u0446\u0435\u043b\u044c\u044e \u0443\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d\u0438\u044f \u0438\u043b\u0438 \u043e\u0442\u043a\u043b\u043e\u043d\u0435\u043d\u0438\u044f
activitiParallelReview.task.wf_rejectedParallelTask.title=\u0414\u043e\u043a\u0443\u043c\u0435\u043d\u0442 \u043e\u0442\u043a\u043b\u043e\u043d\u0435\u043d
activitiParallelReview.task.wf_rejectedParallelTask.description=\u0414\u043e\u043a\u0443\u043c\u0435\u043d\u0442\u044b \u0431\u044b\u043b\u0438 \u043e\u0442\u043a\u043b\u043e\u043d\u0435\u043d\u044b
activitiParallelReview.task.wf_approvedParallelTask.title=\u0414\u043e\u043a\u0443\u043c\u0435\u043d\u0442 \u0443\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d
activitiParallelReview.task.wf_approvedParallelTask.description=\u0414\u043e\u043a\u0443\u043c\u0435\u043d\u0442\u044b \u0431\u044b\u043b\u0438 \u0443\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d\u044b
activitiParallelReview.property.wf_reviewerCount.title=\u0427\u0438\u0441\u043b\u043e \u0440\u0435\u0446\u0435\u043d\u0437\u0435\u043d\u0442\u043e\u0432
activitiParallelReview.property.wf_reviewerCount.description=\u0427\u0438\u0441\u043b\u043e \u0440\u0435\u0446\u0435\u043d\u0437\u0435\u043d\u0442\u043e\u0432
activitiParallelReview.property.wf_requiredPercent.title=\u0422\u0440\u0435\u0431\u0443\u0435\u043c\u044b\u0439 \u043f\u0440\u043e\u0446\u0435\u043d\u0442 \u0443\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d\u0438\u044f

@@ -202,11 +202,8 @@ activitiParallelReview.task.wf_submitParallelReviewTask.description=\u4e00\u4f4d
activitiParallelReview.property.wf_requiredApprovePercent.title=\u6240\u9700\u6279\u51c6\u767e\u5206\u6bd4
activitiParallelReview.property.wf_requiredApprovePercent.description=\u8fbe\u5230\u6279\u51c6\u6761\u4ef6\u6240\u9700\u6279\u51c6\u590d\u67e5\u8005\u6240\u5360\u767e\u5206\u6bd4
activitiParallelReview.task.wf_activitiReviewTask.title=\u590d\u67e5
activitiParallelReview.task.wf_activitiReviewTask.description=\u590d\u67e5\u6587\u6863\u4ee5\u6279\u51c6\u6216\u62d2\u7edd
activitiParallelReview.task.wf_rejectedParallelTask.title=\u6587\u6863\u88ab\u62d2\u7edd
activitiParallelReview.task.wf_rejectedParallelTask.description=\u6587\u6863\u88ab\u62d2\u7edd
activitiParallelReview.task.wf_approvedParallelTask.title=\u5df2\u6279\u51c6\u6587\u6863
activitiParallelReview.task.wf_approvedParallelTask.description=\u5df2\u6279\u51c6\u6587\u6863
activitiParallelReview.property.wf_reviewerCount.title=\u590d\u67e5\u8005\u6570
activitiParallelReview.property.wf_reviewerCount.description=\u590d\u67e5\u8005\u6570
activitiParallelReview.property.wf_requiredPercent.title=\u6240\u9700\u6279\u51c6\u767e\u5206\u6bd4
@@ -29,8 +29,34 @@ import java.io.Serializable;
*/
public interface CacheFactory<K extends Serializable, V>
{
/**
* Creates a fully distributed cache (when clustering is enabled and active).
*
* @param cacheName
* @return SimpleCache
*/
SimpleCache<K, V> createCache(String cacheName);

/**
* Creates a "local" cache, i.e. one that exists only on the JVM that created it. It does not
* attempt to replicate any data, send invalidation messages etc. and is intended for use
* only where the data does not need to be shared and where remote operations should be avoided for
* performance reasons.
*
* @param cacheName
* @return SimpleCache
*/
SimpleCache<K, V> createLocalCache(String cacheName);
SimpleCache<K, V> createInvalidatingCache(String cacheName);

/**
* <p>Creates a local (as opposed to fully-distributed) cache that broadcasts invalidation messages
* to its peers on other cluster members upon cache item removal. Its peers then remove the
* corresponding cache item.</p>
*
* <p>This cache does <strong>not</strong> replicate put(k,v) invocations.</p>
*
* @param cacheName
* @return SimpleCache
*/
SimpleCache<K, V> createInvalidateRemovalCache(String cacheName);
}
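For orientation, here is a minimal usage sketch of the factory methods documented above. It assumes CacheFactory and SimpleCache live in org.alfresco.repo.cache (alongside DefaultCacheFactory) and uses only basic put/get operations; the first and third cache names are taken from the cache-context hunk earlier, while "cache.someLocalCache" is invented for illustration.

import java.io.Serializable;

import org.alfresco.repo.cache.CacheFactory;
import org.alfresco.repo.cache.SimpleCache;

public class CacheFactoryUsageSketch
{
    public static void useFactory(CacheFactory<Serializable, Object> cacheFactory)
    {
        // Shared, cluster-wide data: a fully distributed cache.
        SimpleCache<Serializable, Object> shared = cacheFactory.createCache("cache.propertyValueCache");

        // JVM-local data that never needs replication (illustrative cache name).
        SimpleCache<Serializable, Object> local = cacheFactory.createLocalCache("cache.someLocalCache");

        // Local data whose removals must be broadcast to peers as invalidations.
        SimpleCache<Serializable, Object> invalidating =
                cacheFactory.createInvalidateRemovalCache("cache.propertyUniqueContextSharedCache");

        shared.put("key", "value");
        Object value = shared.get("key");
    }
}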
@@ -61,8 +61,7 @@ public class DefaultCacheFactory<K extends Serializable, V> implements CacheFact
return cache;
}

@Override
public SimpleCache<K, V> createInvalidatingCache(String cacheName)
private SimpleCache<K, V> createInvalidatingCache(String cacheName)
{
return createLocalCache(cacheName);
}
@@ -69,7 +69,7 @@ import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.lock.NodeLockedException;
import org.alfresco.service.cmr.lock.LockStatus;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.model.FileNotFoundException;
@@ -1246,11 +1246,8 @@ public class ScriptNode implements Scopeable, NamespacePrefixResolverProvider

if (getAspectsSet().contains(ContentModel.ASPECT_LOCKABLE))
{
try
{
this.services.getLockService().checkForLock(this.nodeRef);
}
catch (NodeLockedException ex)
LockStatus status = this.services.getLockService().getLockStatus(this.nodeRef);
if (status == LockStatus.LOCKED || status == LockStatus.LOCK_OWNER)
{
locked = true;
}
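The ScriptNode hunk swaps the try/checkForLock/catch(NodeLockedException) probe for a direct LockStatus query, so a lockable node is reported as locked both when someone else holds the lock (LOCKED) and when the current user is the lock owner (LOCK_OWNER). A condensed sketch of the resulting check, with the class and method names around it invented for illustration:

import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.lock.LockStatus;
import org.alfresco.service.cmr.repository.NodeRef;

public class LockCheckSketch
{
    /** Illustrative only: mirrors the replacement logic shown in the ScriptNode hunk. */
    public static boolean isLocked(ServiceRegistry services, NodeRef nodeRef)
    {
        LockStatus status = services.getLockService().getLockStatus(nodeRef);
        return status == LockStatus.LOCKED || status == LockStatus.LOCK_OWNER;
    }
}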
@@ -60,7 +60,7 @@ public class IndexTransactionTracker extends AbstractReindexComponent
private boolean forceReindex;

private long fromTxnId;
private String statusMsg;
private volatile String statusMsg;
private static final String NO_REINDEX = "No reindex in progress";

/**
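Making statusMsg volatile matters because the reindexing thread writes it while other threads presumably read it to report progress; without volatile, a reader may see a stale value indefinitely. A tiny illustrative sketch of the idiom, not the tracker itself:

public class ReindexStatusSketch
{
    // volatile guarantees that a status written by the reindexing thread is
    // immediately visible to any other thread that reads it.
    private volatile String statusMsg = "No reindex in progress";

    public void setStatus(String msg)
    {
        statusMsg = msg;      // writer (reindex) thread
    }

    public String getStatus()
    {
        return statusMsg;     // reader (monitoring) thread
    }
}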
@@ -1603,6 +1603,8 @@ public class IndexInfo implements IndexMonitor
while (shouldBlock())
{
synchronized (mergerTargetLock)
{
if(shouldBlock())
{
if (s_logger.isDebugEnabled())
{
@@ -1611,12 +1613,13 @@ public class IndexInfo implements IndexMonitor
releaseWriteLock();
try
{
mergerTargetLock.wait();
mergerTargetLock.wait(60000);
}
catch (InterruptedException e)
{
}
}
}
getWriteLock();
}
thisThreadPreparing.set(this);
@@ -2572,11 +2575,14 @@ public class IndexInfo implements IndexMonitor
if (!shouldBlock())
{
synchronized (mergerTargetLock)
{
if (!shouldBlock())
{
mergerTargetLock.notifyAll();
}
}
}
}

private void writeStatusToFile(FileChannel channel) throws IOException
{
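The three hunks above tighten the merger hand-off: shouldBlock() is re-checked inside the synchronized block before waiting, the wait is bounded at 60 seconds so a missed notifyAll cannot stall the preparing thread forever, and the waker only calls notifyAll once the blocking condition has cleared. A generic, self-contained sketch of that guarded wait/notify pattern follows; the field names are illustrative, and the real IndexInfo code additionally releases and re-acquires an index write lock around the wait.

public class BoundedBlockingSketch
{
    private final Object targetLock = new Object();
    private volatile boolean blocked = true;

    /** Waits while the condition holds, but never longer than 60s per cycle. */
    public void awaitUnblocked() throws InterruptedException
    {
        while (blocked)
        {
            synchronized (targetLock)
            {
                if (blocked)
                {
                    // Bounded wait: recovers even if a notification was missed.
                    targetLock.wait(60000);
                }
            }
        }
    }

    /** Clears the condition and wakes all waiters. */
    public void unblock()
    {
        synchronized (targetLock)
        {
            blocked = false;
            targetLock.notifyAll();
        }
    }
}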
@@ -386,7 +386,7 @@ public class FTPServerTest extends TestCase

// Update the file contents
String FILE1_CONTENT_2="That's how it is says Pooh!";
ftp.appendFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));
ftp.storeFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));

InputStream is2 = ftp.retrieveFileStream(FILE1_NAME);

@@ -551,7 +551,7 @@ public class FTPServerTest extends TestCase

// Update the file as user two
String FILE1_CONTENT_2="test file content updated";
success = ftpTwo.appendFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));
success = ftpTwo.storeFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));
assertTrue("user two unable to append file", success);

// User one should read user2's content
@@ -738,7 +738,7 @@ public class FTPServerTest extends TestCase

// Update the file contents without setting time directly
String FILE1_CONTENT_2="That's how it is says Pooh!";
ftp.appendFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));
ftp.storeFile(FILE1_NAME , new ByteArrayInputStream(FILE1_CONTENT_2.getBytes("UTF-8")));

InputStream is2 = ftp.retrieveFileStream(FILE1_NAME);
@@ -64,7 +64,7 @@ public class DefaultCacheFactoryTest
@Test
public void canCreateInvalidatingCache()
{
cache = (DefaultSimpleCache<String, String>) cacheFactory.createInvalidatingCache("cache.someCache");
cache = (DefaultSimpleCache<String, String>) cacheFactory.createInvalidateRemovalCache("cache.someCache");
assertEquals(4, cache.getMaxItems());
assertEquals("cache.someCache", cache.getCacheName());
}
@@ -28,6 +28,7 @@ import junit.framework.TestCase;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.lock.mem.Lifetime;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.MutableAuthenticationDao;
@@ -255,6 +256,68 @@ public class LockOwnerDynamicAuthorityTest extends TestCase

}

/**
* ALF-19732: Share does not render Cancel Checkout action for items being edited online
*/
public void testPermissionWithEphemeralLockAspect()
{
permissionService.setPermission(rootNodeRef, "andy", PermissionService.ALL_PERMISSIONS, true);
permissionService.setPermission(rootNodeRef, "lemur", PermissionService.CHECK_OUT, true);
permissionService.setPermission(rootNodeRef, "lemur", PermissionService.WRITE, true);
permissionService.setPermission(rootNodeRef, "lemur", PermissionService.READ, true);
permissionService.setPermission(rootNodeRef, "frog", PermissionService.CHECK_OUT, true);
permissionService.setPermission(rootNodeRef, "frog", PermissionService.WRITE, true);
permissionService.setPermission(rootNodeRef, "frog", PermissionService.READ, true);
authenticationService.authenticate("andy", "andy".toCharArray());
NodeRef testNode = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, ContentModel.TYPE_PERSON,
ContentModel.TYPE_CMOBJECT, null).getChildRef();
lockService.lock(testNode, LockType.READ_ONLY_LOCK, 3600, Lifetime.EPHEMERAL);

assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.LOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.UNLOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_OUT));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_IN));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CANCEL_CHECK_OUT));

authenticationService.authenticate("lemur", "lemur".toCharArray());

assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.LOCK));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode,
PermissionService.UNLOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_OUT));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode, PermissionService.CHECK_IN));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode, PermissionService.CANCEL_CHECK_OUT));

authenticationService.authenticate("andy", "andy".toCharArray());
lockService.unlock(testNode);
authenticationService.authenticate("lemur", "lemur".toCharArray());
lockService.lock(testNode, LockType.READ_ONLY_LOCK, 3600, Lifetime.EPHEMERAL);

assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.LOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.UNLOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_OUT));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_IN));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CANCEL_CHECK_OUT));

authenticationService.authenticate("frog", "frog".toCharArray());

assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode,
PermissionService.LOCK));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode,
PermissionService.UNLOCK));
assertEquals(AccessStatus.ALLOWED, permissionService.hasPermission(testNode, PermissionService.CHECK_OUT));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode, PermissionService.CHECK_IN));
assertEquals(AccessStatus.DENIED, permissionService.hasPermission(testNode, PermissionService.CANCEL_CHECK_OUT));
}

/**
*
*/