Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-24 17:32:48 +00:00)
Merged V2.2 to HEAD (V2.1 sourced)
7127: Merged V2.1 to V2.2
    7118: Fixed overly-eager applicability of patches brought forward from previous releases
    7119: Fixed SQL script patch schema numbering
7245: Merged V2.1 to V2.2:
    7238: Serializes alfresco->alfresco deployments to the same target.
    7241: Added AVM index tracking into the built-in, cron-controlled config.
    7242: More DEBUG messages for index tracking, where required.
    7243: Fix for url encoding issue as found by Ishii Akinori
7372: Merged V2.1 to V2.2
    7289: Fix for AWC-1542 where utf-8 characters not displaying correctly in RSS feed output
    7300: Bumped up session size management values to reduce potential issues with mix-style, shorter transactions.
    7303: Portlet updates for MSIE problems in Liferay.
    7304: Added the <cifs-url-suffix>. AWC-1671.
    7317: Fix OO shutdown
    7319: Catch for raising rule executions using null NodeRefs.

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@7374 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -16,5 +16,5 @@ insert into alf_applied_patch
values
(
'patch.db-V1.4-PerfIndexes02', 'Executed script AlfrescoSchemaUpdate-1.4-PerfIndexes02.sql',
0, 26, -1, 27, null, 'UNKOWN', 1, 1, 'Script completed'
0, 75, -1, 76, null, 'UNKOWN', 1, 1, 'Script completed'
);
@@ -16,5 +16,5 @@ insert into alf_applied_patch
values
(
'patch.db-V1.4-PerfIndexes02', 'Executed script AlfrescoSchemaUpdate-1.4-PerfIndexes02.sql',
0, 26, -1, 27, null, 'UNKOWN', TRUE, TRUE, 'Script completed'
0, 75, -1, 76, null, 'UNKOWN', TRUE, TRUE, 'Script completed'
);
@@ -62,7 +62,7 @@
<!-- Index tracking -->
<!--
This is the default index tracker component. It is used during bootstrap when incremental recovery
is required. It is also used, by default in the clustered index tracking sample.
is required.
-->
<bean
id="admIndexTrackerComponent"
@@ -99,10 +99,51 @@
<ref bean="schedulerFactory" />
</property>
<property name="cronExpression">
<value>${index.tracking.cronExpression}</value>
<value>${index.tracking.adm.cronExpression}</value>
</property>
</bean>

<!--===========================-->
<!-- AVM (WCM) index tracking -->
<!--===========================-->

<bean
id="avmIndexTrackerComponent"
class="org.alfresco.repo.node.index.AVMRemoteSnapshotTracker"
parent="indexRecoveryComponentBase">
<property name="avmService">
<ref bean="avmService" />
</property>
<property name="avmSnapShotTriggeredIndexingMethodInterceptor">
<ref bean="avmSnapShotTriggeredIndexingMethodInterceptor" />
</property>
</bean>

<!-- Schedule index tracking for AVM -->
<bean id="avmIndexTrackerTrigger" class="org.alfresco.util.CronTriggerBean">
<property name="jobDetail">
<bean class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.node.index.IndexRecoveryJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="indexRecoveryComponent">
<ref bean="avmIndexTrackerComponent" />
</entry>
</map>
</property>
</bean>
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<property name="cronExpression">
<value>${index.tracking.avm.cronExpression}</value>
</property>
</bean>

<!-- Missing Content -->
<!-- Bean that attempts to index content that was previously missing -->
<bean
id="missingContentReindexComponent"
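
The jobDataAsMap wiring above hands the avmIndexTrackerComponent to a Quartz job under the key indexRecoveryComponent. As a rough, illustrative sketch (not the actual org.alfresco.repo.node.index.IndexRecoveryJob source), a job of this shape only has to pull the component back out of the JobDataMap and trigger it; the IndexRecoveryComponent interface and its reindex() method are assumptions made for the sketch:

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class IndexRecoveryJobSketch implements Job
{
    // Hypothetical stand-in for the contract of the wired-in reindex component.
    public interface IndexRecoveryComponent
    {
        void reindex();
    }

    public void execute(JobExecutionContext context) throws JobExecutionException
    {
        // The Spring JobDetailBean above copies the jobDataAsMap entries into the Quartz JobDataMap.
        Object component = context.getJobDetail().getJobDataMap().get("indexRecoveryComponent");
        if (component == null)
        {
            throw new JobExecutionException("Missing job data: indexRecoveryComponent");
        }
        ((IndexRecoveryComponent) component).reindex();
    }
}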
@@ -1,6 +1,7 @@
# PatchService messages
patch.service.not_relevant=Not relevant to schema {0}
patch.executer.checking=Checking for patches to apply ...
patch.service.applying_patch=\tApplying patch ''{0}'' ({1}).
patch.executer.no_patches_required=No patches were required.
patch.executer.system_readonly=Patches cannot be applied to a read-only system. Possible incompatibilities may exist between the application code and the existing data.
patch.executer.not_executed =\n=== Recorded patch (not executed) === \nID: {0}\nRESULT: \n{1}\n=====================================
@@ -185,8 +185,7 @@
<value>10000</value>
</property>
<property name="resourceManagerCallFrequencyMillis">
<!-- Was 5000 -->
<value>1000</value>
<value>5000</value>
</property>
</bean>
<bean id="sessionSizeResourceManager" class="org.alfresco.repo.domain.hibernate.SessionSizeResourceManager">
@@ -194,8 +193,7 @@
<ref bean="sessionFactory" />
</property>
<property name="threshold">
<!-- Was 5000 -->
<value>100</value>
<value>5000</value>
</property>
</bean>
@@ -26,15 +26,19 @@ index.recovery.mode=VALIDATE
# Force FULL recovery to stop when encountering errors
index.recovery.stopOnError=true
# Set the frequency with which the index tracking is triggered.
# For more information on index tracking in a cluster:
#    http://wiki.alfresco.com/wiki/High_Availability_Configuration_V1.4_to_V2.1#Version_1.4.5.2C_2.1.1_and_later
# By default, this is effectively never, but can be modified as required.
# Examples:
#    Once every five seconds: 0/5 * * * * ?
#    Once every two seconds : 0/2 * * * * ?
# See http://quartz.sourceforge.net/javadoc/org/quartz/CronTrigger.html
index.tracking.cronExpression=* * * * * ? 2099
index.tracking.adm.cronExpression=${index.tracking.cronExpression}
index.tracking.avm.cronExpression=${index.tracking.cronExpression}
# Other properties.
index.tracking.maxTxnDurationMinutes=60
index.tracking.reindexLagMs=50
index.tracking.reindexLagMs=1000
index.tracking.maxRecordSetSize=1000

# Change the failure behaviour of the configuration checker
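
The defaults above leave index tracking effectively switched off: the shared cron expression only matches in the year 2099, and the new adm/avm-specific properties simply inherit it unless overridden (for example with the documented 0/5 * * * * ? to track every five seconds). A quick sanity check of both expressions, assuming Quartz's CronExpression helper is available (Quartz is already on the classpath for CronTriggerBean):

import java.text.ParseException;
import java.util.Date;

import org.quartz.CronExpression;

public class IndexTrackingCronCheck
{
    public static void main(String[] args) throws ParseException
    {
        Date now = new Date();
        // Shipped default: next fire time is in 2099, i.e. effectively never.
        CronExpression neverish = new CronExpression("* * * * * ? 2099");
        // Documented override: fires every five seconds.
        CronExpression everyFiveSeconds = new CronExpression("0/5 * * * * ?");
        System.out.println("default next fire time:  " + neverish.getNextValidTimeAfter(now));
        System.out.println("override next fire time: " + everyFiveSeconds.getNextValidTimeAfter(now));
    }
}

In a cluster the usual approach is to override index.tracking.cronExpression (or the adm/avm-specific variants) in a custom extension properties file rather than editing this one.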
@@ -56,11 +56,12 @@ import org.apache.commons.logging.LogFactory;
public class PatchServiceImpl implements PatchService
{
private static final String MSG_NOT_RELEVANT = "patch.service.not_relevant";
private static final String MSG_APPLYING_PATCH = "patch.service.applying_patch";

private static final Date ZERO_DATE = new Date(0L);
private static final Date INFINITE_DATE = new Date(Long.MAX_VALUE);

private static Log logger = LogFactory.getLog(PatchServiceImpl.class);
private static Log logger = LogFactory.getLog(PatchExecuter.class);

private DescriptorService descriptorService;
private TransactionService transactionService;
@@ -226,15 +227,35 @@ public class PatchServiceImpl implements PatchService
// get the patch from the DAO
AppliedPatch appliedPatch = patchDaoService.getAppliedPatch(patch.getId());
// We bypass the patch if it was executed successfully
if (appliedPatch != null && appliedPatch.getWasExecuted() && appliedPatch.getSucceeded())
if (appliedPatch != null)
{
// it has already been applied
if (logger.isDebugEnabled())
if (appliedPatch.getWasExecuted() && appliedPatch.getSucceeded())
{
logger.debug("Patch was already successfully applied: \n" +
" patch: " + appliedPatch);
// It has already been successfully applied
if (logger.isDebugEnabled())
{
logger.debug("Patch was already successfully applied: \n" +
" patch: " + appliedPatch);
}
return appliedPatch;
}
else if (patch.getTargetSchema() != appliedPatch.getTargetSchema())
{
// The target schema of the defined patch has changed.
// The patch applicability was changed for some reason, usually as a result of
// merges between branches. We need to detect new patches in clean installs.
if (appliedPatch.getAppliedToSchema() == appliedPatch.getTargetSchema())
{
// The patch applicability changed, but it was originally not executed because
// it was a new patch in a clean install
if (logger.isDebugEnabled())
{
logger.debug("Patch not applied to a previously clean install: \n" +
" patch: " + appliedPatch);
}
return appliedPatch;
}
}
return appliedPatch;
}
// the execution report
String report = null;
@@ -253,6 +274,7 @@ public class PatchServiceImpl implements PatchService
// perform actual execution
try
{
logger.info(I18NUtil.getMessage(MSG_APPLYING_PATCH, patch.getId(), patch.getDescription()));
report = patch.apply();
success = true;
}
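
The reworked check above (revision 7118) amounts to a small decision rule: a recorded patch is skipped if it already executed successfully, or if its declared target schema has since changed but the record shows it was merely registered against a clean install that already met the old target; in every other case it still runs. A standalone sketch of that rule, using a stripped-down stand-in for AppliedPatch purely for illustration:

public class PatchApplicabilitySketch
{
    // Minimal stand-in for the recorded patch data; field names are illustrative.
    public static class RecordedPatch
    {
        boolean wasExecuted;
        boolean succeeded;
        int appliedToSchema;  // schema of the repository when the patch was recorded
        int targetSchema;     // target schema the patch declared at that time
    }

    /**
     * @param recorded        the previously recorded application of the patch, or null
     * @param newTargetSchema the target schema the patch declares in the current release
     * @return true if the patch should still be executed
     */
    public static boolean needsExecution(RecordedPatch recorded, int newTargetSchema)
    {
        if (recorded == null)
        {
            return true;                       // never recorded: execute
        }
        if (recorded.wasExecuted && recorded.succeeded)
        {
            return false;                      // already applied successfully: skip
        }
        if (newTargetSchema != recorded.targetSchema
                && recorded.appliedToSchema == recorded.targetSchema)
        {
            return false;                      // only recorded against a clean install; re-targeting alone does not revive it
        }
        return true;                           // failed previously or genuinely applicable: execute
    }
}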
@@ -486,7 +486,7 @@ public class AVMServiceTest extends AVMServiceTestBase
{
try
{
DeploymentService depService = (DeploymentService) fContext.getBean("DeploymentService");
final DeploymentService depService = (DeploymentService) fContext.getBean("DeploymentService");
NameMatcher matcher = (NameMatcher)fContext.getBean("globalPathExcluder");
setupBasicTree();
fService.addAspect("main:/a", ContentModel.ASPECT_REFERENCEABLE);
@@ -553,6 +553,36 @@ public class AVMServiceTest extends AVMServiceTestBase
depService.deployDifference(-1, "main:/a", "localhost", 50500, "admin", "admin", "target2:/wiggly/diggly", matcher,
true, false, false, null);
System.out.println(report);
fService.createStore("source");
RetryingTransactionHelper.RetryingTransactionCallback<Object> cb =
new RetryingTransactionHelper.RetryingTransactionCallback<Object>()
{
public Object execute()
throws Exception
{
BulkLoader loader = new BulkLoader();
loader.setAvmService(fService);
loader.recursiveLoad("source/java/org/alfresco/repo/avm", "source:/");
return null;
}
};
RetryingTransactionHelper helper = (RetryingTransactionHelper)fContext.getBean("retryingTransactionHelper");
helper.doInTransaction(cb);
fService.createStore("dest");
depService.deployDifference(-1, "source:/avm", "localhost", 50500, "admin", "admin", "dest:/avm", null, true, false, false, null);
Runnable runner = new Runnable()
{
public void run()
{
depService.deployDifference(-1, "source:/avm", "localhost", 50500, "admin", "admin", "dest:/avm", null, true, false, false, null);
}
};
Thread thread = new Thread(runner);
thread.start();
thread.join();
report = depService.deployDifference(-1, "source:/avm", "localhost", 50500, "admin", "admin", "dest:/avm", null, true, false, false, null);
System.out.println(report);
assertEquals("", report.toString());
}
catch (Exception e)
{
@@ -28,6 +28,8 @@ package org.alfresco.repo.deploy;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -77,6 +79,57 @@ import org.springframework.remoting.rmi.RmiProxyFactoryBean;
*/
public class DeploymentServiceImpl implements DeploymentService
{
/**
* Class to hold Deployment destination information.
* Used as a lock to serialize deployments to the same
* destination.
* @author britt
*/
private static class DeploymentDestination
{
private String fHost;

private int fPort;

DeploymentDestination(String host, int port)
{
fHost = host;
fPort = port;
}

/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
return true;
}
if (!(obj instanceof DeploymentDestination))
{
return false;
}
DeploymentDestination other = (DeploymentDestination)obj;
return fHost.equals(other.fHost) && fPort == other.fPort;
}

/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode()
{
return fHost.hashCode() + fPort;
}
};

/**
* Holds locks for all deployment destinations (alfresco->alfresco)
*/
private Map<DeploymentDestination, DeploymentDestination> fDestinations;

/**
* The local AVMService Instance.
*/
@@ -93,6 +146,7 @@ public class DeploymentServiceImpl implements DeploymentService
public DeploymentServiceImpl()
{
fTicketHolder = new ClientTicketHolderThread();
fDestinations = new HashMap<DeploymentDestination, DeploymentDestination>();
}

/**
@@ -109,122 +163,126 @@ public class DeploymentServiceImpl implements DeploymentService
*/
public DeploymentReport deployDifference(int version, String srcPath, String hostName, int port, String userName, String password, String dstPath, NameMatcher matcher, boolean createDst, boolean dontDelete, boolean dontDo, DeploymentCallback callback)
{
try
DeploymentDestination dest = getLock(hostName, port);
synchronized (dest)
{
DeploymentReport report = new DeploymentReport();
AVMRemote remote = getRemote(hostName, port, userName, password);
if (callback != null)
{
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
new Pair<Integer, String>(version, srcPath),
dstPath);
callback.eventOccurred(event);
}
if (version < 0)
{
String storeName = srcPath.substring(0, srcPath.indexOf(":"));
version = fAVMService.createSnapshot(storeName, null, null).get(storeName);
}
// Get the root of the deployment from this server.
AVMNodeDescriptor srcRoot = fAVMService.lookup(version, srcPath);
if (srcRoot == null)
{
throw new AVMNotFoundException("Directory Not Found: " + srcPath);
}
if (!srcRoot.isDirectory())
{
throw new AVMWrongTypeException("Not a directory: " + srcPath);
}
// Create a snapshot on the destination store.
String [] storePath = dstPath.split(":");
int snapshot = -1;
AVMNodeDescriptor dstParent = null;
if (!dontDo)
{
String[] parentBase = AVMNodeConverter.SplitBase(dstPath);
dstParent = remote.lookup(-1, parentBase[0]);
if (dstParent == null)
{
if (createDst)
{
createDestination(remote, parentBase[0]);
dstParent = remote.lookup(-1, parentBase[0]);
}
else
{
throw new AVMNotFoundException("Node Not Found: " + parentBase[0]);
}
}
snapshot = remote.createSnapshot(storePath[0], "PreDeploy", "Pre Deployment Snapshot").get(storePath[0]);
}
// Get the root of the deployment on the destination server.
AVMNodeDescriptor dstRoot = remote.lookup(-1, dstPath);
if (dstRoot == null)
{
// If it doesn't exist, do a copyDirectory to create it.
DeploymentEvent event =
new DeploymentEvent(DeploymentEvent.Type.COPIED,
new Pair<Integer, String>(version, srcPath),
dstPath);
report.add(event);
if (callback != null)
{
callback.eventOccurred(event);
}
if (dontDo)
{
return report;
}
copyDirectory(version, srcRoot, dstParent, remote, matcher);
remote.createSnapshot(storePath[0], "Deployment", "Post Deployment Snapshot.");
if (callback != null)
{
event = new DeploymentEvent(DeploymentEvent.Type.END,
new Pair<Integer, String>(version, srcPath),
dstPath);
callback.eventOccurred(event);
}
return report;
}
if (!dstRoot.isDirectory())
{
throw new AVMWrongTypeException("Not a Directory: " + dstPath);
}
// The corresponding directory exists so recursively deploy.
try
{
deployDirectoryPush(version, srcRoot, dstRoot, remote, matcher, dontDelete, dontDo, report, callback);
remote.createSnapshot(storePath[0], "Deployment", "Post Deployment Snapshot.");
DeploymentReport report = new DeploymentReport();
AVMRemote remote = getRemote(hostName, port, userName, password);
if (callback != null)
{
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.END,
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.START,
new Pair<Integer, String>(version, srcPath),
dstPath);
callback.eventOccurred(event);
}
return report;
}
catch (AVMException e)
{
if (version < 0)
{
String storeName = srcPath.substring(0, srcPath.indexOf(":"));
version = fAVMService.createSnapshot(storeName, null, null).get(storeName);
}
// Get the root of the deployment from this server.
AVMNodeDescriptor srcRoot = fAVMService.lookup(version, srcPath);
if (srcRoot == null)
{
throw new AVMNotFoundException("Directory Not Found: " + srcPath);
}
if (!srcRoot.isDirectory())
{
throw new AVMWrongTypeException("Not a directory: " + srcPath);
}
// Create a snapshot on the destination store.
String [] storePath = dstPath.split(":");
int snapshot = -1;
AVMNodeDescriptor dstParent = null;
if (!dontDo)
{
String[] parentBase = AVMNodeConverter.SplitBase(dstPath);
dstParent = remote.lookup(-1, parentBase[0]);
if (dstParent == null)
{
if (createDst)
{
createDestination(remote, parentBase[0]);
dstParent = remote.lookup(-1, parentBase[0]);
}
else
{
throw new AVMNotFoundException("Node Not Found: " + parentBase[0]);
}
}
snapshot = remote.createSnapshot(storePath[0], "PreDeploy", "Pre Deployment Snapshot").get(storePath[0]);
}
// Get the root of the deployment on the destination server.
AVMNodeDescriptor dstRoot = remote.lookup(-1, dstPath);
if (dstRoot == null)
{
// If it doesn't exist, do a copyDirectory to create it.
DeploymentEvent event =
new DeploymentEvent(DeploymentEvent.Type.COPIED,
new Pair<Integer, String>(version, srcPath),
dstPath);
report.add(event);
if (callback != null)
{
callback.eventOccurred(event);
}
if (dontDo)
{
return report;
}
copyDirectory(version, srcRoot, dstParent, remote, matcher);
remote.createSnapshot(storePath[0], "Deployment", "Post Deployment Snapshot.");
if (callback != null)
{
event = new DeploymentEvent(DeploymentEvent.Type.END,
new Pair<Integer, String>(version, srcPath),
dstPath);
callback.eventOccurred(event);
}
return report;
}
if (!dstRoot.isDirectory())
{
throw new AVMWrongTypeException("Not a Directory: " + dstPath);
}
// The corresponding directory exists so recursively deploy.
try
{
if (snapshot != -1)
deployDirectoryPush(version, srcRoot, dstRoot, remote, matcher, dontDelete, dontDo, report, callback);
remote.createSnapshot(storePath[0], "Deployment", "Post Deployment Snapshot.");
if (callback != null)
{
AVMSyncService syncService = getSyncService(hostName, port);
List<AVMDifference> diffs = syncService.compare(snapshot, dstPath, -1, dstPath, null);
syncService.update(diffs, null, false, false, true, true, "Aborted Deployment", "Aborted Deployment");
DeploymentEvent event = new DeploymentEvent(DeploymentEvent.Type.END,
new Pair<Integer, String>(version, srcPath),
dstPath);
callback.eventOccurred(event);
}
return report;
}
catch (Exception ee)
catch (AVMException e)
{
throw new AVMException("Failed to rollback to version " + snapshot + " on " + hostName, ee);
try
{
if (snapshot != -1)
{
AVMSyncService syncService = getSyncService(hostName, port);
List<AVMDifference> diffs = syncService.compare(snapshot, dstPath, -1, dstPath, null);
syncService.update(diffs, null, false, false, true, true, "Aborted Deployment", "Aborted Deployment");
}
}
catch (Exception ee)
{
throw new AVMException("Failed to rollback to version " + snapshot + " on " + hostName, ee);
}
throw new AVMException("Deployment to " + hostName + "failed.", e);
}
throw new AVMException("Deployment to " + hostName + "failed.", e);
}
}
finally
{
fTicketHolder.setTicket(null);
finally
{
fTicketHolder.setTicket(null);
}
}
}

@@ -937,4 +995,22 @@ public class DeploymentServiceImpl implements DeploymentService
{
return matcher != null && ((srcPath != null && matcher.matches(srcPath)) || (dstPath != null && matcher.matches(dstPath)));
}

/**
* Get the object to lock for an alfresco->alfresco target.
* @param host
* @param port
* @return
*/
private synchronized DeploymentDestination getLock(String host, int port)
{
DeploymentDestination newDest = new DeploymentDestination(host, port);
DeploymentDestination dest = fDestinations.get(newDest);
if (dest == null)
{
dest = newDest;
fDestinations.put(dest, dest);
}
return dest;
}
}
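
Revision 7238 above is the lock-per-destination idiom: intern one canonical object per (host, port) target in a map, then synchronize on it around the whole deployment so concurrent pushes to the same target queue up while pushes to different targets still run in parallel. A simplified, self-contained sketch of the same idea (class and method names here are illustrative, not Alfresco's):

import java.util.HashMap;
import java.util.Map;

public class PerDestinationLocks
{
    // One lock object per "host:port" destination.
    private final Map<String, Object> locks = new HashMap<String, Object>();

    // Always returns the same lock instance for the same destination.
    private synchronized Object getLock(String host, int port)
    {
        String key = host + ":" + port;
        Object lock = locks.get(key);
        if (lock == null)
        {
            lock = new Object();
            locks.put(key, lock);
        }
        return lock;
    }

    public void deploy(String host, int port, Runnable work)
    {
        synchronized (getLock(host, port))
        {
            work.run();   // at most one in-flight deployment per destination
        }
    }
}

The production code keys the map on a small DeploymentDestination value object (equals/hashCode over host and port) and uses the interned key itself as the monitor, which amounts to the same thing.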
@@ -236,7 +236,7 @@ public class AVMFullIndexRecoveryComponent extends AbstractReindexComponent
}
else
{
return recoveryMode.NONE;
return RecoveryMode.NONE;
}
}
@@ -96,13 +96,20 @@ public class AVMRemoteSnapshotTracker extends AbstractReindexComponent
{
if (logger.isDebugEnabled())
{
logger.debug("Updating index for store " + store.getName() + " from snapshot " + lastIndexed + " to " + current);
logger.debug("Reindexing snapshots for AVM store " + store.getName() + " from " + lastIndexed + " to " + current);
}
recoverSnapShot(store.getName(), lastIndexed, current);
upToDate = false;
}
}
}
if (upToDate)
{
if (logger.isDebugEnabled())
{
logger.debug("Reindex check complete for AVM stores");
}
}
}
while (!upToDate);
@@ -274,6 +274,10 @@ found:
for (Long toExpireTxnId : toExpireTxnIds)
{
voids.remove(toExpireTxnId);
if (logger.isDebugEnabled())
{
logger.debug("Void has expired: " + toExpireTxnId);
}
}
// Done
return fromTimeAdjusted;
@@ -54,6 +54,7 @@ import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.util.GUID;
import org.alfresco.util.ParameterCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

@@ -772,6 +773,9 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
@SuppressWarnings("unchecked")
public void addRulePendingExecution(NodeRef actionableNodeRef, NodeRef actionedUponNodeRef, Rule rule, boolean executeAtEnd)
{
ParameterCheck.mandatory("actionableNodeRef", actionableNodeRef);
ParameterCheck.mandatory("actionedUponNodeRef", actionedUponNodeRef);

// First check to see if the node has been disabled
if (this.isEnabled() == true &&
this.disabledNodeRefs.contains(this.getOwningNodeRef(rule)) == false &&
@@ -83,11 +83,18 @@ public class OpenOfficeConnectionTester extends AbstractLifecycleBean
}

/**
* Does nothing.
* Disconnect
*/
@Override
protected void onShutdown(ApplicationEvent event)
{
if(connection != null)
{
if(connection.isConnected())
{
connection.disconnect();
}
}
}

/**