Merged V2.1 to HEAD

   6958: Fix for WCM-823
   6959: Merged V1.4 to V2.1
      6943: Upgrade scripts for transaction commit time and indexes for QName columns on alf_child_assoc
   6960: Fixed script patch "applied on" date updates.
   6961: Retry transactions on ConstraintViolationException.
   6964: Added svn revision number to be substituted into build string if build number is not passed.
   6965: Daylight savings for FTP. Fix for AR-1776.
   6966: Added catch blocks for the AVMLockingException. WCM-877.
   6967: Interim fix for WCM-866 (large link validation report causes SQL exception)
   6968: Fixes for AWC-1309 "Broken preview image for Web Projects in MySpaces" and similar AWC-1635 "Broken/Missing images in MySpaces Web Script".
   6970: Force DB write ordering of the NodeStatus vs Node object.
   6971: More transaction demarcation fixes for special cases of non-executed script patches.
   6972: Switch off session size management for the mass archive and restore test.
   6973: Fixed AR-1801: Boolean isMultiValued() no longer returns null


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@7370 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley 2007-11-13 00:24:19 +00:00
parent e82c2cd946
commit 0ddb624acf
18 changed files with 195 additions and 27 deletions


@ -530,6 +530,9 @@
<property name="publicAction">
<value>false</value>
</property>
<property name="maxNumberLinksInReport">
<value>500</value>
</property>
</bean>
<!-- Scheduled action helper beans -->


@ -54,6 +54,7 @@
<ref bean="patch.db-V1.4-TxnCommitTimeIndex" />
<ref bean="patch.db-V2.1-FKIndexes" />
<ref bean="patch.db-V2.1-ExplicitIndexes" />
<ref bean="patch.db-V1.4-PerfIndexes02" />
<ref bean="patch.db-V2.1-JBPMData" />
<ref bean="patch.db-V2.1-VersionColumns2" />
<ref bean="patch.db-V2.1-JBPMProcessKey" />


@ -0,0 +1,10 @@
--
-- More post-creation indexes. (Generic Schema 1.4)
--
-- These are not declared in the Hibernate mappings.
--
-- Association QNames
CREATE INDEX idx_ca_type_qname ON alf_child_assoc (type_qname);
CREATE INDEX idx_ca_qname ON alf_child_assoc (qname);
CREATE INDEX idx_na_type_qname ON alf_node_assoc (type_qname);


@ -0,0 +1,20 @@
--
-- Some explicit indexes to improve performance for various use-cases (Generic Schema 1.4)
--
-- Association QNames
CREATE INDEX idx_ca_type_qname ON alf_child_assoc (type_qname);
CREATE INDEX idx_ca_qname ON alf_child_assoc (qname);
CREATE INDEX idx_na_type_qname ON alf_node_assoc (type_qname);
--
-- Record script finish
--
delete from alf_applied_patch where id = 'patch.db-V1.4-PerfIndexes02';
insert into alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
values
(
'patch.db-V1.4-PerfIndexes02', 'Executed script AlfrescoSchemaUpdate-1.4-PerfIndexes02.sql',
0, 26, -1, 27, null, 'UNKNOWN', 1, 1, 'Script completed'
);
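
The "Record script finish" block is what lets the patch service treat the script as already applied on later startups: the PatchServiceImpl change further down builds its map of applied patches from the rows in alf_applied_patch. A minimal JDBC sketch of that check, not part of this commit (the DataSource is assumed to point at the Alfresco database):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import javax.sql.DataSource;

public class AppliedPatchCheck
{
    /** Returns true if the given patch/script id has a successful row in alf_applied_patch. */
    public static boolean isApplied(DataSource dataSource, String patchId) throws Exception
    {
        String sql = "select succeeded from alf_applied_patch where id = ?";
        Connection con = dataSource.getConnection();
        try
        {
            PreparedStatement stmt = con.prepareStatement(sql);
            stmt.setString(1, patchId);
            ResultSet rs = stmt.executeQuery();
            // e.g. isApplied(dataSource, "patch.db-V1.4-PerfIndexes02")
            return rs.next() && rs.getBoolean(1);
        }
        finally
        {
            con.close();
        }
    }
}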


@ -0,0 +1,20 @@
--
-- Some explicit indexes to improve performance for various use-cases (PostgreSQL 1.4)
--
-- Association QNames
CREATE INDEX idx_ca_type_qname ON alf_child_assoc (type_qname);
CREATE INDEX idx_ca_qname ON alf_child_assoc (qname);
CREATE INDEX idx_na_type_qname ON alf_node_assoc (type_qname);
--
-- Record script finish
--
delete from alf_applied_patch where id = 'patch.db-V1.4-PerfIndexes02';
insert into alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
values
(
'patch.db-V1.4-PerfIndexes02', 'Executed script AlfrescoSchemaUpdate-1.4-PerfIndexes02.sql',
0, 26, -1, 27, null, 'UNKNOWN', TRUE, TRUE, 'Script completed'
);


@ -0,0 +1,18 @@
--
-- Explicit index for alf_transaction.commit_time_ms (PostgreSQL 1.4)
--
CREATE INDEX idx_commit_time_ms ON alf_transaction (commit_time_ms);
UPDATE alf_transaction SET commit_time_ms = id WHERE commit_time_ms IS NULL;
--
-- Record script finish
--
delete from alf_applied_patch where id = 'patch.db-V1.4-TxnCommitTimeIndex';
insert into alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
values
(
'patch.db-V1.4-TxnCommitTimeIndex', 'Executed script AlfrescoSchemaUpdate-1.4-TxnCommitTimeIndex.sql',
0, 75, -1, 76, null, 'UNKNOWN', TRUE, TRUE, 'Script completed'
);


@ -1093,5 +1093,21 @@
</list>
</property>
</bean>
<bean id="patch.db-V1.4-PerfIndexes02" class="org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch" parent="basePatch">
<property name="id"><value>patch.db-V1.4-PerfIndexes02</value></property>
<property name="description"><value>patch.schemaUpgradeScript.description</value></property>
<property name="fixesFromSchema"><value>0</value></property>
<property name="fixesToSchema"><value>110</value></property>
<property name="targetSchema"><value>111</value></property>
<property name="scriptUrl">
<value>classpath:alfresco/dbscripts/upgrade/1.4/${db.script.dialect}/AlfrescoSchemaUpdate-1.4-PerfIndexes02.sql</value>
</property>
<!-- dependent on upgrade script 1.4-2 having been run -->
<property name="dependsOn" >
<list>
<ref bean="patch.schemaUpdateScript-V1.4-2" />
</list>
</property>
</bean>
</beans>


@ -72,6 +72,7 @@ import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
import org.alfresco.service.cmr.avm.AVMWrongTypeException;
import org.alfresco.service.cmr.avm.VersionDescriptor;
import org.alfresco.service.cmr.avm.locking.AVMLockingException;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.namespace.QName;
@ -905,6 +906,10 @@ public class AVMDiskDriver extends AlfrescoDiskDriver implements DiskInterface
{
throw new FileNotFoundException(params.getPath());
}
catch (AVMLockingException ex)
{
throw new AccessDeniedException(params.getPath());
}
// Return the file
@ -1039,6 +1044,10 @@ public class AVMDiskDriver extends AlfrescoDiskDriver implements DiskInterface
{
throw new IOException("Invalid path, " + name);
}
catch (AVMLockingException ex)
{
throw new AccessDeniedException("File locked, " + name);
}
}
/**


@ -127,7 +127,8 @@ public class FTPDate
*/
public final static String packMlstDateTime( long dateTime)
{
return _mlstFormat.format( new Date( dateTime));
long dst = TimeZone.getDefault().getDSTSavings();
return _mlstFormat.format( new Date( dateTime - dst));
}
/**
@ -138,6 +139,7 @@ public class FTPDate
*/
public final static String packMlstDateTimeLong( long dateTime)
{
return _mlstFormatLong.format( new Date( dateTime));
long dst = TimeZone.getDefault().getDSTSavings();
return _mlstFormatLong.format( new Date( dateTime - dst));
}
}
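
The daylight-saving fix shifts the MLST timestamp back by the default time zone's DST savings before formatting. A small self-contained illustration of the same adjustment (the yyyyMMddHHmmss pattern is an assumption for this sketch; the real _mlstFormat is defined elsewhere in FTPDate):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class MlstDateExample
{
    public static void main(String[] args)
    {
        // assumed MLST-style timestamp pattern for this sketch
        SimpleDateFormat mlstFormat = new SimpleDateFormat("yyyyMMddHHmmss");

        long dateTime = System.currentTimeMillis();
        // getDSTSavings() returns the zone's daylight-saving offset in milliseconds,
        // typically 3600000 (one hour)
        long dst = TimeZone.getDefault().getDSTSavings();

        System.out.println("unadjusted: " + mlstFormat.format(new Date(dateTime)));
        System.out.println("adjusted  : " + mlstFormat.format(new Date(dateTime - dst)));
    }
}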


@ -4619,8 +4619,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
if (finfo.hasModifyDateTime()) {
buf.append(_factNames[i]);
buf.append("=");
buf.append(FTPDate.packMlstDateTime(finfo
.getModifyDateTime()));
buf.append(FTPDate.packMlstDateTime(finfo.getModifyDateTime()));
buf.append(";");
}
break;
@ -4631,8 +4630,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
if (finfo.hasCreationDateTime()) {
buf.append(_factNames[i]);
buf.append("=");
buf.append(FTPDate.packMlstDateTime(finfo
.getCreationDateTime()));
buf.append(FTPDate.packMlstDateTime(finfo.getCreationDateTime()));
buf.append(";");
}
break;


@ -57,6 +57,7 @@ public class LinkValidationAction extends ActionExecuterAbstractBase
private LinkValidationService linkValidationService;
private AVMService avmService;
private int maxNumberLinksInReport = -1;
private static Log logger = LogFactory.getLog(LinkValidationAction.class);
@ -80,7 +81,19 @@ public class LinkValidationAction extends ActionExecuterAbstractBase
this.avmService = service;
}
/**
* Sets the maximum number of links to show in a report
*
* @param maxLinks The maximum number of links to store in the report,
* -1 will store all links but this must be used with
* extreme caution as the report is stored as a BLOB
* in the underlying database and these have different
* maximum sizes
*/
public void setMaxNumberLinksInReport(int maxLinks)
{
this.maxNumberLinksInReport = maxLinks;
}
@Override
protected void addParameterDefinitions(List<ParameterDefinition> paramList)
@ -136,12 +149,13 @@ public class LinkValidationAction extends ActionExecuterAbstractBase
{
if (destWebappPath == null)
{
logger.debug("Performing link validation check for webapp '" + webappPath + "'");
logger.debug("Performing link validation check for webapp '" + webappPath + "', storing a maximum of " +
this.maxNumberLinksInReport + " broken links");
}
else
{
logger.debug("Performing link validation check for webapp '" + webappPath + "', comparing against '" +
destWebappPath + "'");
destWebappPath + "', storing a maximum of " + this.maxNumberLinksInReport + " broken links");
}
}
@ -160,16 +174,18 @@ public class LinkValidationAction extends ActionExecuterAbstractBase
// create the report object using the 2 sets of results
report = new LinkValidationReport(storeName, webappName, manifest,
monitor.getFileUpdateCount(), monitor.getUrlUpdateCount());
monitor.getFileUpdateCount(), monitor.getUrlUpdateCount(),
this.maxNumberLinksInReport);
}
else
{
// retrieve the manifest of all the broken links and files for the webapp
HrefManifest manifest = this.linkValidationService.getBrokenHrefManifest(webappPath);
// Create the report object using the link check results
report = new LinkValidationReport(storeName, webappName, manifest,
manifest.getBaseFileCount(), manifest.getBaseLinkCount());
manifest.getBaseFileCount(), manifest.getBaseLinkCount(),
this.maxNumberLinksInReport);
// the monitor object is not used here so manually set
// the done status so the client can retrieve the report.


@ -33,6 +33,8 @@ import java.util.List;
import java.util.Map;
import org.alfresco.util.ParameterCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Object representing the result of a link validation action being executed.
@ -54,7 +56,9 @@ public class LinkValidationReport implements Serializable
private int numberBrokenLinks = -1;
private int baseSnapshotVersion = -1;
private int latestSnapshotVersion = -1;
private int maxNumberLinksInReport = -1;
private boolean successful = true;
private boolean maxLinksReached = false;
private Date completedAt;
private Throwable error;
@ -62,6 +66,7 @@ public class LinkValidationReport implements Serializable
private Map<String, HrefManifestEntry> brokenLinksByFile;
private static final long serialVersionUID = 7562964706845609991L;
private static Log logger = LogFactory.getLog(LinkValidationReport.class);
/**
* Constructs a link validation report from the results of a check of the
@ -72,9 +77,11 @@ public class LinkValidationReport implements Serializable
* @param manifest The manifest of broken links and snapshot info
* @param noFilesChecked The number of files checked
* @param noLinksChecked The number of links checked
* @param maxNumberLinksInReport The maximum number of links to store in
* the report, -1 will store all links passed in the manifest object
*/
public LinkValidationReport(String store, String webapp, HrefManifest manifest,
int noFilesChecked, int noLinksChecked)
int noFilesChecked, int noLinksChecked, int maxNumberLinksInReport)
{
this.store = store;
this.webapp = webapp;
@ -84,6 +91,7 @@ public class LinkValidationReport implements Serializable
this.numberLinksChecked = noLinksChecked;
this.baseSnapshotVersion = manifest.getBaseSnapshotVersion();
this.latestSnapshotVersion = manifest.getLatestSnapshotVersion();
this.maxNumberLinksInReport = maxNumberLinksInReport;
// create list and map
List<HrefManifestEntry> manifests = manifest.getManifestEntries();
@ -147,6 +155,16 @@ public class LinkValidationReport implements Serializable
return this.numberBrokenLinks;
}
public int getMaxNumberLinksInReport()
{
return this.maxNumberLinksInReport;
}
public boolean hasMaxNumberLinksExceeded()
{
return this.maxLinksReached;
}
public List<String> getFilesWithBrokenLinks()
{
return this.brokenFiles;
@ -199,6 +217,8 @@ public class LinkValidationReport implements Serializable
buffer.append(" webapp=").append(this.webapp);
buffer.append(" baseSnapshot=").append(this.baseSnapshotVersion);
buffer.append(" latestSnapshot=").append(this.latestSnapshotVersion);
buffer.append(" maxNumberLinksInReport=").append(this.maxNumberLinksInReport);
buffer.append(" maxLinksReached=").append(this.maxLinksReached);
buffer.append(" error=").append(this.error).append(")");
return buffer.toString();
}
@ -212,6 +232,7 @@ public class LinkValidationReport implements Serializable
{
ParameterCheck.mandatory("manifests", manifests);
// iterate over required amount of links and store them
for (HrefManifestEntry manifest : manifests)
{
String fileName = manifest.getFileName();
@ -219,6 +240,20 @@ public class LinkValidationReport implements Serializable
this.brokenFiles.add(fileName);
this.brokenLinksByFile.put(fileName, manifest);
this.numberBrokenLinks = this.numberBrokenLinks + manifest.getHrefs().size();
// check whether we have exceeded the maximum number
// of links, if we have break out
if (this.maxNumberLinksInReport != -1 &&
(this.numberBrokenLinks > this.maxNumberLinksInReport))
{
if (logger.isWarnEnabled())
logger.warn("Maximum number of links ("+ this.maxNumberLinksInReport +
") for report has been exceeded at file number: " +
this.brokenFiles.size());
this.maxLinksReached = true;
break;
}
}
}
}
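
With the cap in place, a caller can tell whether the stored report was truncated by using the accessors added above. An illustrative helper, not part of this commit (the org.alfresco.linkvalidation package in the import is assumed):

import org.alfresco.linkvalidation.LinkValidationReport;   // package assumed for this sketch

public class LinkReportSummary
{
    /** Builds a one-line summary, flagging reports truncated by maxNumberLinksInReport. */
    public static String summarise(LinkValidationReport report)
    {
        if (report.hasMaxNumberLinksExceeded())
        {
            return "Broken link report truncated at " + report.getMaxNumberLinksInReport() +
                   " links; files recorded: " + report.getFilesWithBrokenLinks().size();
        }
        return "Broken link report complete; files with broken links: " +
               report.getFilesWithBrokenLinks().size();
    }
}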


@ -80,4 +80,12 @@ public interface PatchDaoService
* a date
*/
public List<AppliedPatch> getAppliedPatches(Date from, Date to);
/**
* Update the patch <i>applied on</i> date.
*
* @param id the patch ID
* @param appliedOnDate the date applied
*/
public void setAppliedOnDate(String id, Date appliedOnDate);
}


@ -116,7 +116,7 @@ public class PatchServiceImpl implements PatchService
// construct a list of executed patches by ID (also check the date)
Map<String, AppliedPatch> appliedPatchesById = new HashMap<String, AppliedPatch>(23);
List<AppliedPatch> appliedPatches = patchDaoService.getAppliedPatches();
for (AppliedPatch appliedPatch : appliedPatches)
for (final AppliedPatch appliedPatch : appliedPatches)
{
appliedPatchesById.put(appliedPatch.getId(), appliedPatch);
// Update the time of execution if it is null. This is to deal with
@ -124,7 +124,16 @@ public class PatchServiceImpl implements PatchService
// an execution time assigned
if (appliedPatch.getAppliedOnDate() == null)
{
appliedPatch.setAppliedOnDate(new Date());
RetryingTransactionCallback<Date> callback = new RetryingTransactionCallback<Date>()
{
public Date execute() throws Throwable
{
Date now = new Date();
patchDaoService.setAppliedOnDate(appliedPatch.getId(), now);
return now;
}
};
transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);
}
}


@ -128,4 +128,10 @@ public class HibernatePatchDaoServiceImpl extends HibernateDaoSupport implements
// done
return queryResults;
}
public void setAppliedOnDate(String id, Date appliedOnDate)
{
AppliedPatch patch = (AppliedPatch) getHibernateTemplate().get(AppliedPatchImpl.class, id);
patch.setAppliedOnDate(appliedOnDate);
}
}


@ -163,21 +163,13 @@ public class AVMSubmitPackageHandler
}
else
{
if (desc.isDeletedDirectory())
if (desc.isDeletedDirectory() == false)
{
// lookup the previous child and get its contents
final List<AVMNodeDescriptor> history = fAVMService.getHistory(desc, 2);
if (history.size() == 1)
Map<String, AVMNodeDescriptor> list = fAVMService.getDirectoryListing(desc, true);
for (AVMNodeDescriptor child : list.values())
{
return;
recursivelyRemoveLocks(webProject, version, child.getPath());
}
desc = history.get(1);
}
Map<String, AVMNodeDescriptor> list = fAVMService.getDirectoryListing(desc, true);
for (AVMNodeDescriptor child : list.values())
{
recursivelyRemoveLocks(webProject, version, child.getPath());
}
}
}
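
Because the removed and added lines are interleaved above, the committed version of this block is easier to read in isolation. Reassembled from the added lines only (a fragment of recursivelyRemoveLocks, not new code):

if (desc.isDeletedDirectory() == false)
{
    Map<String, AVMNodeDescriptor> list = fAVMService.getDirectoryListing(desc, true);
    for (AVMNodeDescriptor child : list.values())
    {
        recursivelyRemoveLocks(webProject, version, child.getPath());
    }
}

In other words, the history lookup for deleted directories is gone: the lock removal now recurses only into directories that still exist.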


@ -35,6 +35,7 @@ import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager;
import org.alfresco.repo.node.StoreArchiveMap;
import org.alfresco.repo.node.archive.RestoreNodeReport.RestoreStatus;
import org.alfresco.repo.node.integrity.IntegrityChecker;
@ -471,6 +472,8 @@ public class ArchiveAndRestoreTest extends TestCase
*/
public void testArchiveVsDeletePerformance() throws Exception
{
// Disable the in-transaction flushing
SessionSizeResourceManager.setDisableInTransaction();
// Start by deleting the node structure and then recreating it.
// Only measure the delete speed
int iterations = 100;


@ -40,6 +40,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.StaleObjectStateException;
import org.hibernate.StaleStateException;
import org.hibernate.exception.ConstraintViolationException;
import org.hibernate.exception.LockAcquisitionException;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
@ -70,6 +71,7 @@ public class RetryingTransactionHelper
StaleObjectStateException.class,
LockAcquisitionException.class,
BatchUpdateException.class,
ConstraintViolationException.class,
DataIntegrityViolationException.class,
StaleStateException.class
};
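
With ConstraintViolationException in the retryable set, a write that collides with a concurrent insert on a unique key is re-run instead of failing the caller. The usage pattern is the one already shown in the PatchServiceImpl change above; a sketch of the same shape (the patch id here is only a placeholder):

RetryingTransactionCallback<Date> callback = new RetryingTransactionCallback<Date>()
{
    public Date execute() throws Throwable
    {
        Date now = new Date();
        // any write that may trip a unique constraint under concurrency;
        // a ConstraintViolationException thrown here is now retried
        patchDaoService.setAppliedOnDate("patch.example", now);
        return now;
    }
};
Date applied = transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);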