Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-31 17:39:05 +00:00)
Merged V2.2 to HEAD
11106: Leniency in AVM CAL upgrade to avoid customer upgrade issues ACT-4500
11129: ETWOTWO-460 Service Port for alfresco runtime
11144: Proper clean-up of deleted node's properties and aspects
11146: Modifications to enable 'alternatives' when running patches
11153: Fixed masked NPE when checking 'alternative' patches
11154: Some neat reporting when a patch doesn't execute as a result of an alternative patch having run
11161: ETWOTWO-91
11163: ETWOTWO-733: CheckOutCheckInService.getWorkingCopy(NodeRef) returns nodes ...
11165: Merged V2.1 to V2.2
    10983: Close stream for audit config
11192: ETWOTWO-169 - Editing FSR Deployment Receiver causes password to be lost
11210: Merge of refactored AVM Filesystem storeCreated Processing from 2.1

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@11227 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -52,3 +52,21 @@
#
#db.driver=org.postgresql.Driver
#db.url=jdbc:postgresql://localhost:5432/alfresco

# The well known RMI registry port is defined in the alfresco-shared.properties file
# alfresco.rmi.services.port=50500
#
# RMI service ports for the individual services.
# These six services are available remotely.
#
# Assign individual ports for each service for best performance
# or run several services on the same port. You can even run everything on 50500 if needed.
#
# Select 0 to use a random unused port.
#
#avm.rmi.service.port=50501
#avmsync.rmi.service.port=50502
#attribute.rmi.service.port=50503
#authentication.rmi.service.port=50504
#repo.rmi.service.port=50505
#action.rmi.service.port=50506
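As the comments above note, the six services can all share the well-known registry port. A minimal sketch of such an override, assuming the registry port defined in alfresco-shared.properties; the values shown are hypothetical, not part of this commit:

# Hypothetical override: run all six remote services on the single well-known port
alfresco.rmi.services.port=50500
avm.rmi.service.port=50500
avmsync.rmi.service.port=50500
attribute.rmi.service.port=50500
authentication.rmi.service.port=50500
repo.rmi.service.port=50500
action.rmi.service.port=50500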

@@ -1,4 +1,5 @@
# PatchService messages
patch.service.preceeded_by_alternative=Preceeded by alternative patch ''{0}''.
patch.service.not_relevant=Not relevant to schema {0}
patch.executer.checking=Checking for patches to apply ...
patch.service.applying_patch=\tApplying patch ''{0}'' ({1}).
@@ -19,6 +20,9 @@ patch.general.property_not_set=Patch property ''{0}'' has not been set on this p

# Individual patch messages

patch.noOpPatch.description=A placeholder patch; usually marks a superceded patch.
patch.noOpPatch.result=No-op patch

patch.marker.description=Marker patch to record installations and upgrades
patch.marker.result=Marker patch applied

@@ -3,6 +3,8 @@
"http://www.springframework.org/dtd/spring-beans.dtd">

<beans>

<!-- Alfresco Remote Services Definition -->
<bean id="avmRemoteTransport" class="org.alfresco.repo.avm.AVMRemoteTransportService"
init-method="init" destroy-method="shutDown">
<property name="idleTimeout">
@@ -31,6 +33,9 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${avm.rmi.service.port}</value>
</property>
</bean>

<bean id="avmSyncServiceTransport" class="org.alfresco.repo.avm.AVMSyncServiceTransportImpl">
@@ -55,6 +60,9 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${avmsync.rmi.service.port}</value>
</property>
</bean>

<!-- Remote export of AttributeService -->
@@ -80,6 +88,9 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${attribute.rmi.service.port}</value>
</property>
</bean>

<!-- The AuthenticationService exported as an RMI service. -->
@@ -96,6 +107,9 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${authentication.rmi.service.port}</value>
</property>
</bean>

<!-- A Simple Filesystem like API for the repo implementation.
@@ -163,6 +177,9 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${repo.rmi.service.port}</value>
</property>
</bean>

<!-- Remoting the ActionService -->
@@ -189,5 +206,8 @@
<property name="registryPort">
<value>${alfresco.rmi.services.port}</value>
</property>
<property name="servicePort">
<value>${action.rmi.service.port}</value>
</property>
</bean>
</beans>

@@ -231,4 +231,23 @@ avm.remote.idlestream.timeout=30000
system.usages.enabled=true

# Repository endpoint - used by Activity Service
repo.remote.endpoint.url=http://localhost:8080/alfresco/service

# The well known RMI registry port is defined in the alfresco-shared.properties file
# alfresco.rmi.services.port=50500
#
# RMI service ports for the individual services.
# These six services are available remotely.
#
# Assign individual ports for each service for best performance
# or run several services on the same port, you can even run everything on 50500 if
# running through a firewall.
#
# Specify 0 to use a random unused port.
#
avm.rmi.service.port=50501
avmsync.rmi.service.port=50502
attribute.rmi.service.port=50503
authentication.rmi.service.port=50504
repo.rmi.service.port=50505
action.rmi.service.port=50506

@@ -31,6 +31,7 @@ import org.alfresco.jlan.server.filesys.DiskInterface;
import org.alfresco.jlan.server.filesys.FileName;
import org.alfresco.jlan.server.filesys.FileSystem;
import org.alfresco.jlan.server.filesys.NotifyChange;
import org.alfresco.jlan.util.StringList;
import org.alfresco.repo.avm.CreateStoreCallback;
import org.alfresco.repo.avm.CreateVersionCallback;
import org.alfresco.repo.avm.PurgeStoreCallback;
@@ -86,6 +87,11 @@ public class AVMContext extends AlfrescoContext

private int m_showOptions;

// List of newly created store names that need adding into the virtualization view

private StringList m_newStores;
private Object m_newStoresLock;

/**
* Class constructor
*
@@ -127,6 +133,9 @@ public class AVMContext extends AlfrescoContext

m_virtualView = true;
m_showOptions = showOptions;

m_newStoresLock = new Object();
m_newStores = new StringList();

// Save the associated filesystem driver

@@ -172,6 +181,34 @@ public class AVMContext extends AlfrescoContext
{
return m_virtualView;
}

/**
* Check if there are any new stores queued for adding to the virtualization view
*
* @return boolean
*/
protected final boolean hasNewStoresQueued() {
if ( m_newStores == null || m_newStores.numberOfStrings() == 0)
return false;
return true;
}

/**
* Return the new stores queue, and reset the current queue
*
* @return StringList
*/
protected StringList getNewStoresQueue() {

StringList storesQueue = null;

synchronized ( m_newStoresLock) {
storesQueue = m_newStores;
m_newStores = new StringList();
}

return storesQueue;
}

/**
* Check if normal stores should be shown in the virtualization view
@@ -285,6 +322,11 @@ public class AVMContext extends AlfrescoContext
*/
public void storeCreated(String storeName)
{
// Not interested if the virtualization view is not enabled

if ( isVirtualizationView() == false)
return;

// Make sure the file state cache is enabled

FileStateTable fsTable = getStateTable();
@@ -297,10 +339,20 @@ public class AVMContext extends AlfrescoContext

if ( rootState != null)
{
// Delete the root folder file state and recreate it
// DEBUG

fsTable.removeFileState( FileName.DOS_SEPERATOR_STR);
// m_avmDriver.findPseudoState( new AVMPath( ""), this);
if ( logger.isDebugEnabled())
logger.debug("Queueing new store " + storeName + " for addition to virtualization view");

// Add the new store name to the list to be picked up by the next file server access
// to the filesystem

synchronized ( m_newStoresLock) {

// Add the new store name

m_newStores.addString( storeName);
}
}
}

@@ -311,6 +363,11 @@ public class AVMContext extends AlfrescoContext
*/
public void storePurged(String storeName)
{
// Not interested if the virtualization view is not enabled

if ( isVirtualizationView() == false)
return;

// Make sure the file state cache is enabled

FileStateTable fsTable = getStateTable();
@@ -367,6 +424,11 @@ public class AVMContext extends AlfrescoContext
*/
public void versionCreated(String storeName, int versionID)
{
// Not interested if the virtualization view is not enabled

if ( isVirtualizationView() == false)
return;

// Make sure the file state cache is enabled

FileStateTable fsTable = getStateTable();
@@ -432,6 +494,11 @@ public class AVMContext extends AlfrescoContext
*/
public void versionPurged(String storeName, int versionID)
{
// Not interested if the virtualization view is not enabled

if ( isVirtualizationView() == false)
return;

// Make sure the file state cache is enabled

FileStateTable fsTable = getStateTable();
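The AVMContext changes above hand newly created store names from the storeCreated() callback to the file server thread with a simple queue-and-swap idiom: producers append under m_newStoresLock, and the consumer (seen in the AVMDiskDriver hunk that follows) takes the whole list while a fresh one is installed. A minimal, self-contained sketch of that idiom, with illustrative class and field names rather than the Alfresco API:

import java.util.ArrayList;
import java.util.List;

// Sketch of the queue-and-swap hand-off used between AVMContext and AVMDiskDriver.
class NewStoreQueue {
    private final Object lock = new Object();
    private List<String> pending = new ArrayList<String>();

    // Producer side: called from the store-created callback
    void add(String storeName) {
        synchronized (lock) {
            pending.add(storeName);
        }
    }

    // Consumer side: returns everything queued so far and resets the queue
    List<String> drain() {
        synchronized (lock) {
            List<String> queued = pending;
            pending = new ArrayList<String>();
            return queued;
        }
    }

    boolean hasQueued() {
        synchronized (lock) {
            return !pending.isEmpty();
        }
    }
}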

@@ -2218,6 +2218,32 @@ public class AVMDiskDriver extends AlfrescoDiskDriver implements DiskInterface
if ( avmPath.isPseudoPath() == false)
return null;

// Check if there are any new stores to be added to the virtualization view

if ( avmCtx.hasNewStoresQueued()) {

// Get the new stores list, there is a chance another thread might get the queue, if the queue is empty
// another thread is processing it

StringList storeNames = avmCtx.getNewStoresQueue();

while ( storeNames.numberOfStrings() > 0) {

// Get the current store name

String curStoreName = storeNames.removeStringAt( 0);

// DEBUG

if ( logger.isDebugEnabled())
logger.debug("Adding new store " + curStoreName);

// Add the current store to the virtualization view

addNewStore( avmCtx, curStoreName);
}
}

// Check if the path is to a store pseudo folder

FileState fstate = null;
@@ -2890,21 +2916,27 @@ public class AVMDiskDriver extends AlfrescoDiskDriver implements DiskInterface
// Get the web project that the sandbox is linked to

WebProjectStorePseudoFile webFolder = (WebProjectStorePseudoFile) fullList.findFile( storeFolder.getWebProject(), false);
int role = webFolder.getUserRole( cInfo.getUserName());

if ( role == WebProjectStorePseudoFile.RoleContentManager && avmCtx.showStoreType( storeFolder.isStoreType()))
{
// User is a content manager, allow access to the store

filterList.addFile( storeFolder);
}
else if ( role == WebProjectStorePseudoFile.RolePublisher && avmCtx.showStoreType( storeFolder.isStoreType()))
{
// Allow access if the user owns the current folder
if ( webFolder != null) {
int role = webFolder.getUserRole( cInfo.getUserName());

if ( storeFolder.getUserName().equalsIgnoreCase( cInfo.getUserName()))
filterList.addFile( storeFolder);
if ( role == WebProjectStorePseudoFile.RoleContentManager && avmCtx.showStoreType( storeFolder.isStoreType()))
{
// User is a content manager, allow access to the store

filterList.addFile( storeFolder);
}
else if ( role == WebProjectStorePseudoFile.RolePublisher && avmCtx.showStoreType( storeFolder.isStoreType()))
{
// Allow access if the user owns the current folder

if ( storeFolder.getUserName().equalsIgnoreCase( cInfo.getUserName()))
filterList.addFile( storeFolder);
}
}
else if ( logger.isDebugEnabled())
logger.debug("Cannot find associated web folder for store " + storeFolder.getFileName());

}
else if ( avmCtx.showNormalStores() || avmCtx.showSiteStores())
{
@@ -2920,4 +2952,190 @@ public class AVMDiskDriver extends AlfrescoDiskDriver implements DiskInterface

return filterList;
}

/**
* Add a new store to the top level folder list
*
* @param avmCtx AVMContext
* @param storeName String
*/
protected void addNewStore( AVMContext avmCtx, String storeName) {

// Get the root folder file state

FileState fstate = avmCtx.getStateTable().findFileState( FileName.DOS_SEPERATOR_STR, true, false);
if ( fstate == null)
return;

// Get the properties for the store

AVMStoreDescriptor storeDesc = m_avmService.getStore( storeName);
if ( storeDesc == null)
return;

Map<QName, PropertyValue> props = m_avmService.getStoreProperties( storeName);

// Check if the store is a main web project

if ( props.containsKey( SandboxConstants.PROP_SANDBOX_STAGING_MAIN))
{
// Get the noderef for the web project

PropertyValue prop = props.get( SandboxConstants.PROP_WEB_PROJECT_NODE_REF);
if ( prop != null) {

// Get the web project noderef

NodeRef webNodeRef = new NodeRef( prop.getStringValue());

// Create the web project pseudo folder

WebProjectStorePseudoFile webProjFolder = new WebProjectStorePseudoFile( storeDesc, FileName.DOS_SEPERATOR_STR + storeName, webNodeRef);
fstate.addPseudoFile( webProjFolder);

// DEBUG

if ( logger.isDebugEnabled())
logger.debug( " Found web project " + webProjFolder.getFileName());

// Get the list of content managers for this web project

List<ChildAssociationRef> mgrAssocs = m_nodeService.getChildAssocs( webNodeRef, WCMAppModel.ASSOC_WEBUSER, RegexQNamePattern.MATCH_ALL);

for ( ChildAssociationRef mgrRef : mgrAssocs)
{
// Get the child node and see if it is a content manager association

NodeRef childRef = mgrRef.getChildRef();

if ( m_nodeService.getProperty( childRef, WCMAppModel.PROP_WEBUSERROLE).equals(ROLE_CONTENT_MANAGER))
{
// Get the user name add it to the web project pseudo folder

String userName = (String) m_nodeService.getProperty( childRef, WCMAppModel.PROP_WEBUSERNAME);

webProjFolder.addUserRole( userName, WebProjectStorePseudoFile.RoleContentManager);

// DEBUG

if ( logger.isDebugEnabled())
logger.debug(" Added content manager " + userName);
}
}
}
}
else
{
// Check if this store is a web project sandbox

int storeType = StoreType.Normal;
String webProjName = null;
String userName = null;

if ( props.containsKey( SandboxConstants.PROP_SANDBOX_AUTHOR_MAIN))
{
// Sandbox store, linked to a web project

storeType = StoreType.WebAuthorMain;

// Get the associated web project name

webProjName = props.get( SandboxConstants.PROP_WEBSITE_NAME).getStringValue();

// Get the user name from teh store name

userName = storeName.substring( webProjName.length() + 2);
}
else if ( props.containsKey( SandboxConstants.PROP_SANDBOX_AUTHOR_PREVIEW))
{
// Author preview sandbox store, linked to a web project

storeType = StoreType.WebAuthorPreview;

// Get the associated web project name

String projPlusUser = storeName.substring( 0, storeName.length() - "--preview".length());
int pos = projPlusUser.lastIndexOf("--");
if ( pos != -1)
{
webProjName = projPlusUser.substring( 0, pos);
userName = projPlusUser.substring(pos + 2);
}
}
else if ( props.containsKey( SandboxConstants.PROP_SANDBOX_WORKFLOW_PREVIEW))
{
// Staging preview sandbox store, linked to a web project

storeType = StoreType.WebStagingPreview;
}
else if ( props.containsKey( SandboxConstants.PROP_SANDBOX_STAGING_PREVIEW))
{
// Staging preview sandbox store, linked to a web project

storeType = StoreType.WebStagingPreview;

// Get the associated web project name

webProjName = storeName.substring( 0, storeName.length() - "--preview".length());
}

// DEBUG

if ( logger.isDebugEnabled())
logger.debug( " Store " + storeDesc.getName() + ", type=" + StoreType.asString( storeType) + ", webproj=" + webProjName + ", username=" + userName);

// Add a pseudo file for the current store

if ( avmCtx.showStoreType( storeType))
{
// Create the pseudo folder for the store

StorePseudoFile storeFolder = new StorePseudoFile( storeDesc, FileName.DOS_SEPERATOR_STR + storeName, storeType);
if ( storeType != StoreType.Normal)
{
storeFolder.setWebProject( webProjName);
storeFolder.setUserName( userName);

// Add all publisher/reviewer user names to the web project roles list

if ( storeFolder.hasWebProject())
{
// Find the associated web project pseudo folder

PseudoFileList folderList = fstate.getPseudoFileList();
if ( folderList != null) {

// Find the associated web project

WebProjectStorePseudoFile webProj = (WebProjectStorePseudoFile) folderList.findFile( storeFolder.getWebProject(), true);

if ( webProj != null) {

// Strip the web project name from the sandbox store name and extract the user name.
// Add the user as a publisher/reviewer to the web project roles list

userName = storeFolder.getFileName().substring( webProj.getFileName().length() + 2);

// If the user does not have a content manager role then add as a publisher

if ( webProj.getUserRole( userName) == WebProjectStorePseudoFile.RoleNone)
{
webProj.addUserRole( userName, WebProjectStorePseudoFile.RolePublisher);

// DEBUG

if ( logger.isDebugEnabled())
logger.debug( " Added publisher " + userName + " to " + webProj.getFileName());
}
}
}
}
}

// Add the store pseudo folder to the root folder file list

fstate.addPseudoFile( storeFolder);
}
}
}
}

@@ -78,32 +78,27 @@ public abstract class AbstractPatch implements Patch
private String description;
/** a list of patches that this one depends on */
private List<Patch> dependsOn;
/** a list of patches that, if already present, mean that this one should be ignored */
private List<Patch> alternatives;
/** flag indicating if the patch was successfully applied */
private boolean applied;
private boolean applyToTenants;
/** track completion * */
int percentComplete = 0;
/** start time * */
long startTime;

/** the service to register ourselves with */
private PatchService patchService;
/** used to ensure a unique transaction per execution */
protected TransactionService transactionService;
/** support service */
protected NamespaceService namespaceService;
/** support service */
protected NodeService nodeService;
/** support service */
protected SearchService searchService;
/** support service */
protected AuthenticationComponent authenticationComponent;
/** support service */
protected TenantAdminService tenantAdminService;


/** track completion * */
int percentComplete = 0;

/** start time * */
long startTime;

public AbstractPatch()
{
this.fixesFromSchema = -1;
@@ -112,6 +107,7 @@ public abstract class AbstractPatch implements Patch
this.applied = false;
this.applyToTenants = true; // by default, apply to each tenant, if tenant service is enabled
this.dependsOn = Collections.emptyList();
this.alternatives = Collections.emptyList();
}

@Override
@@ -144,33 +140,21 @@ public abstract class AbstractPatch implements Patch
this.transactionService = transactionService;
}

/**
* Set a generally-used service
*/
public void setNamespaceService(NamespaceService namespaceService)
{
this.namespaceService = namespaceService;
}

/**
* Set a generally-used service
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}

/**
* Set a generally-used service
*/
public void setSearchService(SearchService searchService)
{
this.searchService = searchService;
}

/**
* Set a generally-used service
*/
public void setAuthenticationComponent(AuthenticationComponent authenticationComponent)
{
this.authenticationComponent = authenticationComponent;
@@ -303,6 +287,22 @@ public abstract class AbstractPatch implements Patch
this.dependsOn = dependsOn;
}

public List<Patch> getAlternatives()
{
return alternatives;
}

/**
* Set all anti-dependencies. If any of the patches in the list have already been executed, then
* this one need not be.
*
* @param alternatives a list of alternative patches
*/
public void setAlternatives(List<Patch> alternatives)
{
this.alternatives = alternatives;
}

public boolean applies(int version)
{
return ((this.fixesFromSchema <= version) && (version <= fixesToSchema));
@@ -345,7 +345,9 @@ public abstract class AbstractPatch implements Patch
checkPropertyNotNull(authenticationComponent, "authenticationComponent");
if (fixesFromSchema == -1 || fixesToSchema == -1 || targetSchema == -1)
{
throw new AlfrescoRuntimeException("Patch properties 'fixesFromSchema', 'fixesToSchema' and 'targetSchema' have not all been set on this patch: \n"
throw new AlfrescoRuntimeException(
"Patch properties 'fixesFromSchema', 'fixesToSchema' and 'targetSchema' " +
"have not all been set on this patch: \n"
+ " patch: " + this);
}
}

@@ -69,6 +69,13 @@ public interface Patch
*/
public List<Patch> getDependsOn();

/**
* Get patches that could have done the work already
*
* @return Returns a list of patches
*/
public List<Patch> getAlternatives();

/**
* Check if the patch is applicable to a given schema version.
*
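The new alternatives list is a bean property (setAlternatives in AbstractPatch, getAlternatives in Patch), so it would be wired through Spring in the same way as dependsOn. A hedged sketch of what such a declaration might look like; the bean ids, the parent bean name and the use of NoOpPatch here are illustrative, not taken from this commit:

<!-- Hypothetical wiring: patch.exampleNew is skipped if patch.exampleOld already ran successfully -->
<bean id="patch.exampleNew" class="org.alfresco.repo.admin.patch.impl.NoOpPatch" parent="basePatch">
    <property name="id"><value>patch.exampleNew</value></property>
    <property name="alternatives">
        <list>
            <ref bean="patch.exampleOld" />
        </list>
    </property>
</bean>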

@@ -56,6 +56,7 @@ import org.apache.commons.logging.LogFactory;
public class PatchServiceImpl implements PatchService
{
private static final String MSG_NOT_RELEVANT = "patch.service.not_relevant";
private static final String MSG_PRECEEDED_BY_ALTERNATIVE = "patch.service.preceeded_by_alternative";
private static final String MSG_APPLYING_PATCH = "patch.service.applying_patch";

private static final Date ZERO_DATE = new Date(0L);
@@ -245,6 +246,7 @@ public class PatchServiceImpl implements PatchService
boolean success = false;
// first check whether the patch is relevant to the repo
Descriptor repoDescriptor = descriptorService.getInstalledRepositoryDescriptor();
String preceededByAlternative = preceededByAlternative(patch);
boolean applies = applies(repoDescriptor, patch);
if (!applies)
{
@@ -252,6 +254,11 @@ public class PatchServiceImpl implements PatchService
report = I18NUtil.getMessage(MSG_NOT_RELEVANT, repoDescriptor.getSchema());
success = true; // this succeeded because it didn't need to be applied
}
else if (preceededByAlternative != null)
{
report = I18NUtil.getMessage(MSG_PRECEEDED_BY_ALTERNATIVE, preceededByAlternative);
success = true; // this succeeded because it didn't need to be applied
}
else
{
// perform actual execution
@@ -309,6 +316,28 @@ public class PatchServiceImpl implements PatchService
return appliedPatch;
}

/**
* Identifies if one of the alternative patches has already been executed.
*
* @param patch the patch to check
* @return Returns the ID of any successfully executed alternative patch
*/
private String preceededByAlternative(Patch patch)
{
// If any alternatives were executed, then bypass this one
List<Patch> alternatives = patch.getAlternatives();
for (Patch alternative : alternatives)
{
// If the patch was executed, then this one was effectively executed
AppliedPatch appliedAlternative = patchDaoService.getAppliedPatch(alternative.getId());
if (appliedAlternative != null && appliedAlternative.getSucceeded())
{
return alternative.getId();
}
}
return null;
}

/**
* Check whether the patch is applicable to the particular version of the repository.
*

@@ -0,0 +1,49 @@
/*
* Copyright (C) 2005-2008 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.

* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.

* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.admin.patch.impl;

import org.alfresco.i18n.I18NUtil;
import org.alfresco.repo.admin.patch.AbstractPatch;

/**
* Does nothing.
*
* @author Derek Hulley
* @since 2.2.2
*/
public class NoOpPatch extends AbstractPatch
{
private static final String MSG_RESULT = "patch.noOpPatch.result";

public NoOpPatch()
{
}

@Override
protected String applyInternal() throws Exception
{
return I18NUtil.getMessage(MSG_RESULT);
}
}

@@ -369,35 +369,66 @@ public class HibernateAuditDAO extends HibernateDaoSupport implements AuditDAO,
}
else
{

InputStream current = new BufferedInputStream(auditInfo.getAuditConfiguration().getInputStream());
ContentReader reader = contentStore.getReader(auditConfig.getConfigURL());
reader.setMimetype(MimetypeMap.MIMETYPE_XML);
reader.setEncoding("UTF-8");
InputStream last = new BufferedInputStream(reader.getContentInputStream());
int currentValue = -2;
int lastValue = -2;
InputStream current = null;
InputStream last = null;
try
{
while ((currentValue != -1) && (lastValue != -1) && (currentValue == lastValue))
current = new BufferedInputStream(auditInfo.getAuditConfiguration().getInputStream());
ContentReader reader = contentStore.getReader(auditConfig.getConfigURL());
reader.setMimetype(MimetypeMap.MIMETYPE_XML);
reader.setEncoding("UTF-8");
last = new BufferedInputStream(reader.getContentInputStream());
int currentValue = -2;
int lastValue = -2;
try
{
currentValue = current.read();
lastValue = last.read();
while ((currentValue != -1) && (lastValue != -1) && (currentValue == lastValue))
{
currentValue = current.read();
lastValue = last.read();

}
}
catch (IOException e)
{
throw new AlfrescoRuntimeException("Failed to read and validate current audit configuration against the last", e);
}
if (currentValue != lastValue)
{
// Files are different - require a new entry
auditConfig = createNewAuditConfigImpl(auditInfo);
}
else
{
// No change
}
}
catch (IOException e)
finally
{
throw new AlfrescoRuntimeException("Failed to read and validate current audit configuration against the last", e);
}
if (currentValue != lastValue)
{
// Files are different - require a new entry
auditConfig = createNewAuditConfigImpl(auditInfo);
}
else
{
// No change
if (current != null)
{
try
{
current.close();
}
catch (IOException e)
{
s_logger.warn(e);
}
}

if (last != null)
{
try
{
last.close();
}
catch (IOException e)
{
s_logger.warn(e);
}
}

}
}
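The hunk above (merging 2.1 fix 10983, "Close stream for audit config") moves the byte-by-byte comparison of the current and last audit configurations into a try block and closes both streams in a finally block. A condensed, self-contained sketch of the same idea, using illustrative names rather than the actual Alfresco types:

import java.io.IOException;
import java.io.InputStream;

// Illustrative helper (not the Alfresco API): byte-wise stream comparison with guaranteed clean-up.
final class StreamCompare {
    static boolean sameContent(InputStream current, InputStream last) throws IOException {
        try {
            int a, b;
            do {
                a = current.read();
                b = last.read();
            } while (a != -1 && b != -1 && a == b);
            return a == b; // only true when both streams hit EOF without a mismatch
        } finally {
            // close both streams even if read() threw
            try { current.close(); } catch (IOException e) { /* swallowed; the patch logs a warning instead */ }
            try { last.close(); } catch (IOException e) { /* swallowed; the patch logs a warning instead */ }
        }
    }
}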

@@ -474,7 +474,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
resultSet = this.searchService.query(
nodeRef.getStoreRef(),
SearchService.LANGUAGE_LUCENE,
"ASPECT:\"" + ContentModel.ASPECT_WORKING_COPY.toString() + "\" +@\\{http\\://www.alfresco.org/model/content/1.0\\}" + ContentModel.PROP_COPY_REFERENCE.getLocalName() + ":\"" + nodeRef.toString() + "\"");
"+ASPECT:\"" + ContentModel.ASPECT_WORKING_COPY.toString() + "\" +@\\{http\\://www.alfresco.org/model/content/1.0\\}" + ContentModel.PROP_COPY_REFERENCE.getLocalName() + ":\"" + nodeRef.toString() + "\"");
if (resultSet.getNodeRefs().size() != 0)
{
workingCopy = resultSet.getNodeRef(0);

@@ -40,6 +40,7 @@ import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.CopyService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -73,6 +74,7 @@ public class CheckOutCheckInServiceImplTest extends BaseSpringTest
private LockService lockService;
private TransactionService transactionService;
private PermissionService permissionService;
private CopyService copyService;

/**
* Data used by the tests
@@ -116,7 +118,8 @@ public class CheckOutCheckInServiceImplTest extends BaseSpringTest
this.lockService = (LockService)this.applicationContext.getBean("lockService");
this.transactionService = (TransactionService)this.applicationContext.getBean("transactionComponent");
this.permissionService = (PermissionService)this.applicationContext.getBean("permissionService");

this.copyService = (CopyService)this.applicationContext.getBean("copyService");

// Authenticate as system to create initial test data set
AuthenticationComponent authenticationComponent = (AuthenticationComponent)this.applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();
@@ -432,7 +435,56 @@ public class CheckOutCheckInServiceImplTest extends BaseSpringTest
});

NodeRef wk3 = this.cociService.getWorkingCopy(this.nodeRef);
assertNull(wk3);
}

/**
* Test the getWorkingCopy method
*/
public void testETWOTWO_733()
{
NodeRef origNodeRef = this.nodeService.createNode(
this.rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}test2"),
ContentModel.TYPE_CONTENT).getChildRef();

// Make a copy of the node
this.copyService.copyAndRename(
origNodeRef,
this.rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}test6"),
false);

NodeRef wk1 = this.cociService.getWorkingCopy(origNodeRef);
assertNull(wk1);

// Check the document out
final NodeRef workingCopy = this.cociService.checkout(origNodeRef);

// Need to commit the transaction in order to get the indexer to run
setComplete();
endTransaction();

final NodeRef finalNodeRef = origNodeRef;

this.transactionService.getRetryingTransactionHelper().doInTransaction(
new RetryingTransactionCallback<Object>()
{
public Object execute()
{
NodeRef wk2 = CheckOutCheckInServiceImplTest.this.cociService.getWorkingCopy(finalNodeRef);
assertNotNull(wk2);
assertEquals(workingCopy, wk2);

CheckOutCheckInServiceImplTest.this.cociService.cancelCheckout(workingCopy);
return null;
}
});

NodeRef wk3 = this.cociService.getWorkingCopy(this.nodeRef);
assertNull(wk3);
}

public void testAR1056()

@@ -36,6 +36,7 @@ import org.alfresco.repo.domain.AccessControlListDAO;
import org.alfresco.repo.domain.DbAccessControlList;
import org.alfresco.repo.domain.hibernate.AclDaoComponentImpl.Indirection;
import org.alfresco.repo.search.AVMSnapShotTriggeredIndexingMethodInterceptor;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.security.permissions.ACLType;
@@ -55,6 +56,8 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
* The AVM implementation for getting and setting ACLs.
@@ -63,6 +66,11 @@ import org.alfresco.util.Pair;
*/
public class AVMAccessControlListDAO implements AccessControlListDAO
{
/**
* The logger.
*/
private static Log s_logger = LogFactory.getLog(AVMAccessControlListDAO.class);

/**
* Reference to the AVM Repository instance.
*/
@@ -765,8 +773,16 @@ public class AVMAccessControlListDAO implements AccessControlListDAO
case WORKFLOW:
case WORKFLOW_PREVIEW:
AVMNodeDescriptor www = fAVMService.lookup(-1, store.getName() + ":/www");
update = fixOldAvmAcls(www, false, indirections);
result.add(update);
if(www != null)
{
update = fixOldAvmAcls(www, false, indirections);
result.add(update);
}
else
{
update = fixOldAvmAcls(root, true, indirections);
result.add(update);
}
break;
case UNKNOWN:
default:
@@ -855,7 +871,7 @@ public class AVMAccessControlListDAO implements AccessControlListDAO
}
else
{
throw new IllegalStateException();
s_logger.warn("Skipping new style ACLs");
}
}
else if (node.isLayeredDirectory())

@@ -43,6 +43,7 @@ import java.util.List;
import java.util.Properties;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.admin.patch.Patch;
import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.domain.PropertyValue;
@@ -697,11 +698,24 @@ public class SchemaBootstrap extends AbstractLifecycleBean
}
// Retrieve the first installed schema number
int installedSchema = getInstalledSchemaNumber(connection);


nextPatch:
for (SchemaUpgradeScriptPatch patch : scriptPatches)
{
final String patchId = patch.getId();
final String scriptUrl = patch.getScriptUrl();

// Check if any of the alternative patches were executed
List<Patch> alternatives = patch.getAlternatives();
for (Patch alternativePatch : alternatives)
{
String alternativePatchId = alternativePatch.getId();
boolean alternativeSucceeded = didPatchSucceed(connection, alternativePatchId);
if (alternativeSucceeded)
{
continue nextPatch;
}
}

// check if the script was successfully executed
boolean wasSuccessfullyApplied = didPatchSucceed(connection, patchId);
@@ -830,17 +844,17 @@ public class SchemaBootstrap extends AbstractLifecycleBean
StringBuilder sb = new StringBuilder(1024);
while(true)
{
String sql = reader.readLine();
String sqlOriginal = reader.readLine();
line++;

if (sql == null)
if (sqlOriginal == null)
{
// nothing left in the file
break;
}

// trim it
sql = sql.trim();
String sql = sqlOriginal.trim();
if (sql.length() == 0 ||
sql.startsWith( "--" ) ||
sql.startsWith( "//" ) ||
@@ -879,8 +893,19 @@ public class SchemaBootstrap extends AbstractLifecycleBean
// Just take it at face value and probably fail.
}
}
// Add newline
if (sb.length() > 0)
{
sb.append("\n");
}
// Add leading whitespace for formatting
int whitespaceCount = sqlOriginal.indexOf(sql);
for (int i = 0; i < whitespaceCount; i++)
{
sb.append(" ");
}
// append to the statement being built up
sb.append(" ").append(sql);
sb.append(sql);
// execute, if required
if (execute)
{
@@ -918,7 +943,7 @@ public class SchemaBootstrap extends AbstractLifecycleBean
}
stmt.execute(sql);
// Record the statement
executedStatements.append(sql).append(";\n");
executedStatements.append(sql).append(";\n\n");
}
catch (SQLException e)
{
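The last two hunks above rebuild multi-line SQL statements while keeping the upgrade script's original indentation: the trimmed line is located inside the untrimmed one with indexOf, which yields the number of leading whitespace characters to re-emit before appending. A standalone illustration of that trick; the SQL text and class name are hypothetical, not from the commit:

// Illustration only: indexOf(trimmed) on the original line equals the leading-whitespace count.
public class SqlLineAppendDemo {
    public static void main(String[] args) {
        String sqlOriginal = "    ADD COLUMN alt_id BIGINT;";      // hypothetical script line
        String sql = sqlOriginal.trim();

        StringBuilder sb = new StringBuilder("ALTER TABLE example_table");
        sb.append("\n");                                           // newline between the lines of one statement
        int whitespaceCount = sqlOriginal.indexOf(sql);            // 4 leading spaces here
        for (int i = 0; i < whitespaceCount; i++) {
            sb.append(" ");
        }
        sb.append(sql);

        System.out.println(sb);  // prints the statement with its original indentation preserved
    }
}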

@@ -1425,8 +1425,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Delete deltas
usageDeltaDAO.deleteDeltas(nodeId);

// // finally delete the node
// getHibernateTemplate().delete(node);
// Wipe out properties and aspects
node.getProperties().clear();
node.getAspects().clear();

// Mark the node as deleted
node.setDeleted(true);

// Remove node from cache