Merged DEV to HEAD (5.0)

87804: ACE-2030: Retire jbpm-upgrade.sql including patches using it
    The from-to schema values match those of the 'patch.db-V3.2-Upgrade-JBPM' bean, and both beans referenced the
    same SQL file, so this bean was simply a duplicate, with the V3.2 bean doing the actual work.  It therefore
    joins the V3.2 version in retirement.  The 'targetSchema' is for information purposes only and has been
    changed from 6001 to 2018, which reflects what the patch actually did (a rough sketch of such a bean
    definition follows the revision list).
  87805: ACE-2030: Remove more patches introduced for upgrade to V3.2
  87806: ACE-2030: Remove more patches introduced for upgrade to V3.3.x
  87807: Move patch.migrateAttrDropOldTables out of the danger zone (ACE-2030) before continuing the cleanup.
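
For context: patches like these are declared as Spring beans in the repository's patch context files. The sketch
below is illustrative only; the class, property names and script path are recalled from typical Alfresco patch
definitions rather than taken from this commit. It shows roughly how a bean such as 'patch.db-V3.2-Upgrade-JBPM'
ties the from/to schema range and the informational 'targetSchema' value to the shared jbpm-upgrade.sql script.

    <bean id="patch.db-V3.2-Upgrade-JBPM"
          class="org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch"
          parent="basePatch">
        <!-- Schema range that gates when the patch runs (values illustrative) -->
        <property name="fixesFromSchema"><value>0</value></property>
        <property name="fixesToSchema"><value>2017</value></property>
        <!-- Informational only; 2018 reflects what the script actually produced -->
        <property name="targetSchema"><value>2018</value></property>
        <!-- Both retired beans pointed at the same upgrade script (path assumed) -->
        <property name="scriptUrl">
            <value>classpath:alfresco/dbscripts/upgrade/3.2/${db.script.dialect}/jbpm-upgrade.sql</value>
        </property>
    </bean>

Since both beans referenced one script and 'targetSchema' carries no executable weight, dropping the duplicate
bean loses nothing.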


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@87812 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Author: Derek Hulley
Date:   2014-10-11 20:57:42 +00:00
Parent: 1217371e37
Commit: 958b298f4a
33 changed files with 131 additions and 3330 deletions


@@ -1,203 +0,0 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Patch to assign users and groups to default zones
*
* @author andyh
*/
public class AuthorityDefaultZonesPatch extends AbstractPatch
{
/** Success message. */
private static final String MSG_SUCCESS = "patch.authorityDefaultZonesPatch.result";
private static final String MSG_UPDATE_USERS = "patch.authorityDefaultZonesPatch.users";
private static final String MSG_UPDATE_GROUPS = "patch.authorityDefaultZonesPatch.groups";
private static Log progress_logger = LogFactory.getLog(PatchExecuter.class);
/** The authority service. */
private AuthorityService authorityService;
private SiteService siteService;
/**
* Sets the authority service.
*
* @param authorityService
* the authority service
*/
public void setAuthorityService(AuthorityService authorityService)
{
this.authorityService = authorityService;
}
/**
* Set the site service
*/
public void setSiteService(SiteService siteService)
{
this.siteService = siteService;
}
@Override
protected String applyInternal() throws Exception
{
int count = 0;
int total = authorityService.getAllAuthorities(AuthorityType.USER).size() + authorityService.getAllAuthorities(AuthorityType.GROUP).size();
reportProgress(total, count);
String msg = I18NUtil.getMessage(MSG_UPDATE_USERS);
progress_logger.info(msg);
count = setZonesForPeople(total, count);
msg = I18NUtil.getMessage(MSG_UPDATE_GROUPS);
progress_logger.info(msg);
setZonesForGroups(total, count);
return MSG_SUCCESS;
}
private int setZonesForPeople(int total, int start)
{
Set<String> defaultZones = new HashSet<String>(2, 1.0f);
defaultZones.add(AuthorityService.ZONE_APP_DEFAULT);
defaultZones.add(AuthorityService.ZONE_AUTH_ALFRESCO);
List<Action> personActions = new ArrayList<Action>(1);
personActions.add(new Action(null, defaultZones, ActionType.SET));
return setZones(AuthorityType.USER, personActions, total, start);
}
private int setZonesForGroups(int total, int start)
{
Set<String> defaultZones = new HashSet<String>(2, 1.0f);
defaultZones.add(AuthorityService.ZONE_APP_DEFAULT);
defaultZones.add(AuthorityService.ZONE_AUTH_ALFRESCO);
Set<String> shareZones = new HashSet<String>(2, 1.0f);
shareZones.add(AuthorityService.ZONE_APP_SHARE);
shareZones.add(AuthorityService.ZONE_AUTH_ALFRESCO);
List<SiteInfo> sites = siteService.listSites(null, null);
List<Action> groupActions = new ArrayList<Action>(sites.size() * 5 + 1);
for (SiteInfo site : sites)
{
groupActions.add(new Action("GROUP_site_" + site.getShortName(), shareZones, ActionType.SET));
groupActions.add(new Action("GROUP_site_" + site.getShortName()+"_SiteManager", shareZones, ActionType.SET));
groupActions.add(new Action("GROUP_site_" + site.getShortName()+"_SiteCollaborator", shareZones, ActionType.SET));
groupActions.add(new Action("GROUP_site_" + site.getShortName()+"_SiteContributor", shareZones, ActionType.SET));
groupActions.add(new Action("GROUP_site_" + site.getShortName()+"_SiteConsumer", shareZones, ActionType.SET));
}
groupActions.add(new Action(null, defaultZones, ActionType.SET));
return setZones(AuthorityType.GROUP, groupActions, total, start);
}
private int setZones(AuthorityType authorityType, List<Action> actions, int total, int start)
{
int count = start;
Set<String> authorities = authorityService.getAllAuthorities(authorityType);
for (String authority : authorities)
{
for (Action action : actions)
{
if (action.name != null)
{
if (action.name.equals(authority))
{
fixAuthority(action.actionType, action.zones, authority);
break;
}
}
else
{
fixAuthority(action.actionType, action.zones, authority);
break;
}
}
count++;
reportProgress(total, count);
}
return count;
}
private void fixAuthority(ActionType actionType, Set<String> zones, String authority)
{
Set<String> current;
switch (actionType)
{
case ADD:
authorityService.addAuthorityToZones(authority, zones);
break;
case SET:
current = authorityService.getAuthorityZones(authority);
authorityService.removeAuthorityFromZones(authority, current);
authorityService.addAuthorityToZones(authority, zones);
break;
case SET_IF_UNSET:
current = authorityService.getAuthorityZones(authority);
if (current.size() == 0)
{
authorityService.addAuthorityToZones(authority, zones);
}
break;
}
}
private enum ActionType
{
ADD, SET, SET_IF_UNSET;
}
private static class Action
{
String name;
Set<String> zones;
ActionType actionType;
Action(String name, Set<String> zones, ActionType actionType)
{
this.name = name;
this.zones = zones;
this.actionType = actionType;
}
}
}


@@ -1,477 +0,0 @@
/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.importer.ImporterBootstrap;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authority.UnknownAuthorityException;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.AuthorityType;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Migrates authority information previously stored in the user store to the spaces store, using the new structure used
* by AuthorityService.
*
* @author dward
*/
public class AuthorityMigrationPatch extends AbstractPatch
{
/** The title we give to the batch process in progress messages / JMX. */
private static final String MSG_PROCESS_NAME = "patch.authorityMigration.process.name";
/** The warning message when a 'dangling' assoc is found that can't be created */
private static final String MSG_WARNING_INVALID_ASSOC = "patch.authorityMigration.warning.assoc";
/** Success message. */
private static final String MSG_SUCCESS = "patch.authorityMigration.result";
/** The progress_logger. */
private static Log progress_logger = LogFactory.getLog(PatchExecuter.class);
/** The old authority name property. */
private static final QName PROP_AUTHORITY_NAME = QName.createQName(ContentModel.USER_MODEL_URI, "authorityName");
/** The old authority display name property. */
private static final QName PROP_AUTHORITY_DISPLAY_NAME = QName.createQName(ContentModel.USER_MODEL_URI,
"authorityDisplayName");
/** The old authority members property. */
private static final QName PROP_MEMBERS = QName.createQName(ContentModel.USER_MODEL_URI, "members");
/** The authority service. */
private AuthorityService authorityService;
/** The rule service. */
private RuleService ruleService;
/** The user bootstrap. */
private ImporterBootstrap userBootstrap;
/**
* Sets the authority service.
*
* @param authorityService
* the authority service
*/
public void setAuthorityService(AuthorityService authorityService)
{
this.authorityService = authorityService;
}
/**
* Sets the rule service.
*
* @param ruleService
* the rule service
*/
public void setRuleService(RuleService ruleService)
{
this.ruleService = ruleService;
}
/**
* Sets the user bootstrap.
*
* @param userBootstrap
* the user bootstrap
*/
public void setUserBootstrap(ImporterBootstrap userBootstrap)
{
this.userBootstrap = userBootstrap;
}
/**
* Recursively retrieves the authorities under the given node and their associations.
*
* @param parentAuthority
* the full name of the parent authority corresponding to the given node, or <code>null</code> if it is
* not an authority node.
* @param nodeRef
* the node to find authorities below
* @param authoritiesToCreate
* the authorities to create
* @param parentAssocs
* the parent associations
* @return count of the number of parent associations
*/
private int retrieveAuthorities(String parentAuthority, NodeRef nodeRef, Map<String, String> authoritiesToCreate,
Map<String, Set<String>> parentAssocs)
{
int assocCount = 0;
// Process all children
List<ChildAssociationRef> cars = this.nodeService.getChildAssocs(nodeRef);
for (ChildAssociationRef car : cars)
{
NodeRef current = car.getChildRef();
// Record an authority to create
String authorityName = DefaultTypeConverter.INSTANCE.convert(String.class, this.nodeService.getProperty(
current, AuthorityMigrationPatch.PROP_AUTHORITY_NAME));
authoritiesToCreate.put(authorityName, DefaultTypeConverter.INSTANCE.convert(String.class, this.nodeService
.getProperty(current, AuthorityMigrationPatch.PROP_AUTHORITY_DISPLAY_NAME)));
// Record the parent association (or empty set if this is a root)
Set<String> parents = parentAssocs.get(authorityName);
if (parents == null)
{
parents = new TreeSet<String>();
parentAssocs.put(authorityName, parents);
}
if (parentAuthority != null)
{
parents.add(parentAuthority);
assocCount++;
}
// loop over properties
Collection<String> members = DefaultTypeConverter.INSTANCE.getCollection(String.class, this.nodeService
.getProperty(current, AuthorityMigrationPatch.PROP_MEMBERS));
if (members != null)
{
String tenantDomain = null;
if (tenantAdminService.isEnabled())
{
tenantDomain = tenantAdminService.getCurrentUserDomain();
}
for (String user : members)
{
// Believe it or not, some old authorities have null members in them!
if (user != null)
{
if ((tenantDomain != null) && (! (tenantDomain.equals(TenantService.DEFAULT_DOMAIN))))
{
if (tenantAdminService.getUserDomain(user).equals(TenantService.DEFAULT_DOMAIN))
{
if (user.equals(tenantAdminService.getBaseNameUser(AuthenticationUtil.getAdminUserName())))
{
// MT: workaround for CHK-11393 (eg. EMAIL_CONTRIBUTORS with member "admin" instead of "admin@tenant")
user = tenantAdminService.getDomainUser(user, tenantDomain);
}
}
}
Set<String> propParents = parentAssocs.get(user);
if (propParents == null)
{
propParents = new TreeSet<String>();
parentAssocs.put(user, propParents);
}
propParents.add(authorityName);
assocCount++;
}
}
}
assocCount += retrieveAuthorities(authorityName, current, authoritiesToCreate, parentAssocs);
}
return assocCount;
}
/**
* Truncates authority names so that they are within {@link QName#MAX_LENGTH} characters.
*
* @param authoritiesToCreate
* the original, untruncated authorities to create
* @param parentAssocs
* the original parent associations to create
* @param targetAuthoritiesToCreate
* the authorities to create with names shortened to QName.MAX_LENGTH
* @param targetParentAssocs
* the parent associations modified to match targetAuthoritiesToCreate
*/
private void truncateAuthorities(final Map<String, String> authoritiesToCreate,
Map<String, Set<String>> parentAssocs, Map<String, String> targetAuthoritiesToCreate,
Map<String, Set<String>> targetParentAssocs)
{
// Work through each authority, creating a unique truncated name where necessary and populating a map of old to
// new names
Map<String, String> targetLookup = new TreeMap<String, String>();
for (Map.Entry<String, String> entry : authoritiesToCreate.entrySet())
{
String sourceName = entry.getKey();
int sourceLength = sourceName.length();
String targetName = sourceLength > QName.MAX_LENGTH ? sourceName.substring(0, QName.MAX_LENGTH) : sourceName;
int i=0;
while (targetAuthoritiesToCreate.containsKey(targetName))
{
String suffix = String.valueOf(++i);
int prefixLength = QName.MAX_LENGTH - suffix.length();
if (prefixLength < 0)
{
break;
}
targetName = (sourceLength > prefixLength ? sourceName.substring(0, prefixLength) : sourceName) + suffix;
}
targetLookup.put(sourceName, targetName);
targetAuthoritiesToCreate.put(targetName, entry.getValue());
}
// Apply the name mapping to the parent associations
for (Map.Entry<String, Set<String>> entry: parentAssocs.entrySet())
{
Set<String> parents = new TreeSet<String>();
for (String parent : entry.getValue())
{
String targetParent = targetLookup.get(parent);
parents.add(targetParent == null ? parent : targetParent);
}
String sourceChild = entry.getKey();
String targetChild = targetLookup.get(sourceChild);
targetParentAssocs.put(targetChild == null ? sourceChild : targetChild, parents);
}
}
/**
* Migrates the authorities and their associations.
*
* @param authoritiesToCreate
* the authorities to create
* @param parentAssocs
* the parent associations to create (if they don't exist already)
*/
private void migrateAuthorities(final Map<String, String> authoritiesToCreate, Map<String, Set<String>> parentAssocs)
{
final String tenantDomain = tenantAdminService.getCurrentUserDomain();
BatchProcessor.BatchProcessWorker<Map.Entry<String, Set<String>>> worker = new BatchProcessor.BatchProcessWorker<Map.Entry<String, Set<String>>>()
{
public String getIdentifier(Entry<String, Set<String>> entry)
{
return entry.getKey();
}
public void beforeProcess() throws Throwable
{
// Disable rules
ruleService.disableRules();
// Authentication
// TODO tenant switch
String tenantSystemUser = tenantAdminService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain);
AuthenticationUtil.setRunAsUser(tenantSystemUser);
}
public void afterProcess() throws Throwable
{
// Enable rules
ruleService.enableRules();
// Clear authentication
AuthenticationUtil.clearCurrentSecurityContext();
}
public void process(Entry<String, Set<String>> authority) throws Throwable
{
String authorityName = authority.getKey();
boolean existed = AuthorityMigrationPatch.this.authorityService.authorityExists(authorityName);
Set<String> knownParents;
if (existed)
{
knownParents = AuthorityMigrationPatch.this.authorityService.getContainingAuthorities(
AuthorityType.GROUP, authorityName, true);
}
else
{
knownParents = Collections.emptySet();
AuthorityType authorityType = AuthorityType.getAuthorityType(authorityName);
// We have associations to a non-existent authority. If it is a user, just skip it because it must
// have been a 'dangling' reference
if (authorityType == AuthorityType.USER)
{
AuthorityMigrationPatch.progress_logger.warn(I18NUtil.getMessage(
AuthorityMigrationPatch.MSG_WARNING_INVALID_ASSOC, authorityName));
return;
}
AuthorityMigrationPatch.this.authorityService.createAuthority(authorityType,
AuthorityMigrationPatch.this.authorityService.getShortName(authorityName),
authoritiesToCreate.get(authorityName), null);
}
Set<String> parentAssocsToCreate = authority.getValue();
parentAssocsToCreate.removeAll(knownParents);
if (!parentAssocsToCreate.isEmpty())
{
try
{
AuthorityMigrationPatch.this.authorityService.addAuthority(parentAssocsToCreate, authorityName);
}
catch (UnknownAuthorityException e)
{
// Let's force a transaction retry if a parent doesn't exist. It may be because we are
// waiting for another worker thread to create it
throw new ConcurrencyFailureException("Forcing batch retry for unknown authority", e);
}
catch (InvalidNodeRefException e)
{
// Another thread may have written the node, but it is not visible to this transaction
// See: ALF-5471: 'authorityMigration' patch can report 'Node does not exist'
throw new ConcurrencyFailureException("Forcing batch retry for invalid node", e);
}
}
}
};
// Migrate using 2 threads, 20 authorities per transaction. Log every 100 entries.
new BatchProcessor<Map.Entry<String, Set<String>>>(
I18NUtil.getMessage(AuthorityMigrationPatch.MSG_PROCESS_NAME),
transactionHelper,
parentAssocs.entrySet(),
2, 20,
AuthorityMigrationPatch.this.applicationEventPublisher,
AuthorityMigrationPatch.progress_logger, 100).process(worker, true);
}
/**
* Gets the old authority container.
*
* @return the old authority container or <code>null</code> if not found
*/
private NodeRef getAuthorityContainer()
{
NodeRef rootNodeRef = this.nodeService.getRootNode(this.userBootstrap.getStoreRef());
QName qnameAssocSystem = QName.createQName("sys", "system", this.namespaceService);
List<ChildAssociationRef> results = this.nodeService.getChildAssocs(rootNodeRef, RegexQNamePattern.MATCH_ALL,
qnameAssocSystem);
NodeRef sysNodeRef = null;
if (results.size() == 0)
{
return null;
}
else
{
sysNodeRef = results.get(0).getChildRef();
}
QName qnameAssocAuthorities = QName.createQName("sys", "authorities", this.namespaceService);
results = this.nodeService.getChildAssocs(sysNodeRef, RegexQNamePattern.MATCH_ALL, qnameAssocAuthorities);
NodeRef authNodeRef = null;
if (results.size() == 0)
{
return null;
}
else
{
authNodeRef = results.get(0).getChildRef();
}
return authNodeRef;
}
/**
* TODO: The walking of the group associations should be wrapped up in a BatchProcessWorkProvider, if possible
*/
@Override
protected String applyInternal() throws Exception
{
NodeRef authorityContainer = getAuthorityContainer();
int authorities = 0, assocs = 0;
if (authorityContainer != null)
{
// Crawl the old tree of authorities
Map<String, String> authoritiesToCreate = new TreeMap<String, String>();
Map<String, Set<String>> parentAssocs = new TreeMap<String, Set<String>>();
assocs = retrieveAuthorities(null, authorityContainer, authoritiesToCreate, parentAssocs);
// Truncate names to an acceptable length
Map<String, String> targetAuthoritiesToCreate = new TreeMap<String, String>();
Map<String, Set<String>> targetParentAssocs = new TreeMap<String, Set<String>>();
truncateAuthorities(authoritiesToCreate, parentAssocs, targetAuthoritiesToCreate, targetParentAssocs);
// Sort the group associations in parent-first order (root groups first)
Map<String, Set<String>> sortedParentAssocs = new LinkedHashMap<String, Set<String>>(
parentAssocs.size() * 2);
List<String> authorityPath = new ArrayList<String>(5);
for (String authority : targetParentAssocs.keySet())
{
authorityPath.add(authority);
visitGroupAssociations(authorityPath, targetParentAssocs, sortedParentAssocs);
authorityPath.clear();
}
// Recreate the authorities and their associations in parent-first order
migrateAuthorities(targetAuthoritiesToCreate, sortedParentAssocs);
authorities = authoritiesToCreate.size();
}
// build the result message
return I18NUtil.getMessage(AuthorityMigrationPatch.MSG_SUCCESS, authorities, assocs);
}
/**
* Visits the last authority in the given list by recursively visiting its parents in associationsOld and then
* adding the authority to associationsNew. Used to sort associationsOld into 'parent-first' order.
*
* @param authorityPath
* The authority to visit, preceded by all its descendants. Allows detection of cyclic child
* associations.
* @param associationsOld
* the association map to sort
* @param associationsNew
* the association map to add to in parent-first order
*/
private static void visitGroupAssociations(List<String> authorityPath, Map<String, Set<String>> associationsOld,
Map<String, Set<String>> associationsNew)
{
String authorityName = authorityPath.get(authorityPath.size() - 1);
if (!associationsNew.containsKey(authorityName))
{
Set<String> associations = associationsOld.get(authorityName);
if (!associations.isEmpty())
{
int insertIndex = authorityPath.size();
for (String parentAuthority : associations)
{
// Prevent cyclic paths
if (!authorityPath.contains(parentAuthority))
{
authorityPath.add(parentAuthority);
visitGroupAssociations(authorityPath, associationsOld, associationsNew);
authorityPath.remove(insertIndex);
}
}
}
associationsNew.put(authorityName, associations);
}
}
}


@@ -1,602 +0,0 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.repo.admin.registry.RegistryKey;
import org.alfresco.repo.admin.registry.RegistryService;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorkerAdaptor;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.control.ControlDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.JobLockService.JobLockRefreshCallback;
import org.alfresco.repo.lock.LockAcquisitionException;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.alfresco.util.VmShutdownListener.VmShutdownException;
import org.apache.commons.lang.mutable.MutableInt;
import org.apache.commons.lang.mutable.MutableLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Component to migrate old-style content URL storage (<tt>contentUrl=store://...|mimetype=...</tt>)
* to the newer <b>alf_content_url</b> storage.
* <p/>
* The {@link ServiceRegistry} is used to record progress. The component picks up ranges of node IDs
* (DM) and records the progress. Since new nodes will not need converting, the converter
* will stop once it hits the largest node ID that it found upon first initiation. Once completed,
* the content store reader will start to pick up orphaned content and schedule it for deletion.
* <p/>
* A cluster-wide lock is set so that a single instance of this job will be running per Alfresco
* installation.
*
* @author Derek Hulley
* @since 3.2.1
*/
public class ContentUrlConverterPatch extends AbstractPatch
{
// Registry keys
private static final RegistryKey KEY_ADM_MAX_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "max-id");
private static final RegistryKey KEY_ADM_RANGE_START_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "range-start-id");
private static final RegistryKey KEY_ADM_DONE = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "done");
private static final RegistryKey KEY_STORE_DONE = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "store", "done");
// Lock key
private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter");
// Lock as per patching
private static Log logger = LogFactory.getLog(PatchExecuter.class);
private RegistryService registryService;
private JobLockService jobLockService;
private PatchDAO patchDAO;
private ControlDAO controlDAO;
private ContentStore contentStore;
private ContentDataDAO contentDataDAO;
private int threadCount;
private int batchSize;
private boolean runAsScheduledJob;
private ThreadLocal<Boolean> runningAsJob = new ThreadLocal<Boolean>();
/**
* Default constructor
*/
public ContentUrlConverterPatch()
{
runningAsJob.set(Boolean.FALSE);
threadCount = 2;
batchSize=500;
}
/**
* Service to record progress for later pick-up
*/
public void setRegistryService(RegistryService registryService)
{
this.registryService = registryService;
}
/**
* Service to prevent concurrent execution
*/
public void setJobLockService(JobLockService jobLockService)
{
this.jobLockService = jobLockService;
}
/**
* Component that provides low-level queries and updates to support this patch
*/
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
/**
* Component that provides low-level database-specific control to support the patch
*/
public void setControlDAO(ControlDAO controlDAO)
{
this.controlDAO = controlDAO;
}
/**
* Set the store containing the content URLs to lift for potential cleaning.
*
* @param contentStore the store containing the system's content URLs
*/
public void setContentStore(ContentStore contentStore)
{
this.contentStore = contentStore;
}
/**
* Set the component that will write URLs coming from the
* {@link ContentStore#getUrls(ContentUrlHandler) content store}.
*
* @param contentDataDAO the DAO to write the URLs
*/
public void setContentDataDAO(ContentDataDAO contentDataDAO)
{
this.contentDataDAO = contentDataDAO;
}
/**
* Set the number of threads that will be used to process the required work.
*
* @param threadCount the number of threads
*/
public void setThreadCount(int threadCount)
{
this.threadCount = threadCount;
}
/**
* Set the number of URLs that are processed per job pass; this property is ignored
* when this component is run as a patch. Keep the number low (500) when running
* at short intervals on a live machine.
*
* @param batchSize the number of nodes to process per batch when running on a schedule
*/
public void setBatchSize(int batchSize)
{
this.batchSize = batchSize;
}
/**
* Set whether the patch execution should just bypass any actual work, i.e. the admin has
* chosen to trigger the work manually.
*
* @param runAsScheduledJob <tt>true</tt> to leave all work up to the scheduled job
*/
public void setRunAsScheduledJob(boolean runAsScheduledJob)
{
this.runAsScheduledJob = runAsScheduledJob;
}
@Override
protected void checkProperties()
{
PropertyCheck.mandatory(this, "registryService", registryService);
PropertyCheck.mandatory(this, "jobLockService", jobLockService);
PropertyCheck.mandatory(this, "patchDAO", patchDAO);
super.checkProperties();
}
/**
* Method called when executed as a scheduled job.
*/
private void executeViaJob()
{
AuthenticationUtil.RunAsWork<String> patchRunAs = new AuthenticationUtil.RunAsWork<String>()
{
public String doWork() throws Exception
{
RetryingTransactionCallback<String> patchTxn = new RetryingTransactionCallback<String>()
{
public String execute() throws Exception
{
try
{
runningAsJob.set(Boolean.TRUE);
String report = applyInternal();
// done
return report;
}
finally
{
runningAsJob.set(Boolean.FALSE); // Back to default
}
}
};
return transactionHelper.doInTransaction(patchTxn);
}
};
String report = AuthenticationUtil.runAs(patchRunAs, AuthenticationUtil.getSystemUserName());
if (report != null)
{
logger.info(report);
}
}
/**
* Gets a set of work to do and executes it within this transaction. If kicked off via a job,
* the task will exit before completion, on the assumption that it will be kicked off at regular
* intervals. When called as a patch, it will run to completion with full progress logging.
*/
@Override
protected String applyInternal() throws Exception
{
if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_READ_WRITE)
{
// Nothing to do
return null;
}
boolean isRunningAsJob = runningAsJob.get().booleanValue();
// Do we bug out of patch execution?
if (runAsScheduledJob && !isRunningAsJob)
{
return I18NUtil.getMessage("patch.convertContentUrls.bypassingPatch");
}
// Lock in proportion to the batch size (0.1s per node or 0.8 min per 500)
String lockToken = getLock(batchSize*100L);
if (lockToken == null)
{
// Some other process is busy
if (isRunningAsJob)
{
// Fine, we're doing batches
return null;
}
else
{
throw new RuntimeException("Unable to get job lock during patch execution. Only one server should perform the upgrade.");
}
}
// Use a flag to keep track of the running job
final AtomicBoolean running = new AtomicBoolean(true);
jobLockService.refreshLock(lockToken, LOCK, batchSize*100, new JobLockRefreshCallback()
{
@Override
public boolean isActive()
{
return running.get();
}
@Override
public void lockReleased()
{
running.set(false);
}
});
boolean completed = false;
try
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.start"));
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.start"));
boolean admCompleted = applyADMLooping(running);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.start", contentStore));
boolean urlLiftingCompleted = applyUrlLifting(running);
completed = admCompleted && urlLiftingCompleted;
}
catch (RuntimeException e)
{
logger.error(
I18NUtil.getMessage("patch.convertContentUrls.error", e.getMessage()),
e);
return I18NUtil.getMessage("patch.convertContentUrls.error", e.getMessage());
}
finally
{
// The lock will self-release if isActive is answered in the negative
running.set(false);
}
if (completed)
{
return I18NUtil.getMessage("patch.convertContentUrls.done");
}
else
{
return I18NUtil.getMessage("patch.convertContentUrls.inProgress");
}
}
/**
* Attempts to get the lock. If the lock couldn't be taken, then <tt>null</tt> is returned.
*
* @return Returns the lock token or <tt>null</tt>
*/
private String getLock(long time)
{
try
{
return jobLockService.getLock(LOCK, time);
}
catch (LockAcquisitionException e)
{
return null;
}
}
private boolean applyADMLooping(final AtomicBoolean running)
{
RetryingTransactionCallback<Boolean> callback = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute() throws Throwable
{
return applyADM();
}
};
boolean done = false;
while (running.get())
{
done = transactionHelper.doInTransaction(callback, false, true);
if (done)
{
break;
}
}
return done;
}
/**
* Do the DM conversion work
* @return Returns <tt>true</tt> if the work is done
*/
private boolean applyADM() throws Exception
{
Long maxId = (Long) registryService.getProperty(KEY_ADM_MAX_ID);
// Must we run at all?
Boolean done = (Boolean) registryService.getProperty(KEY_ADM_DONE);
if (done != null && done.booleanValue())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId));
return true;
}
if (maxId == null)
{
maxId = patchDAO.getMaxAdmNodeID();
registryService.addProperty(KEY_ADM_MAX_ID, maxId);
}
Long startId = (Long) registryService.getProperty(KEY_ADM_RANGE_START_ID);
if (startId == null)
{
startId = 1L;
registryService.addProperty(KEY_ADM_RANGE_START_ID, startId);
}
// Each thread gets 10 executions i.e. we get ranges for threadCount*10 lots of work
Long endId = startId;
Collection<Pair<Long, Long>> batchProcessorWork = new ArrayList<Pair<Long,Long>>(2);
for (long i = 0; i < threadCount*10; i++)
{
endId = startId + (i+1L) * batchSize;
Pair<Long, Long> batchEntry = new Pair<Long, Long>(
startId + i * batchSize,
endId);
batchProcessorWork.add(batchEntry);
}
BatchProcessWorkerAdaptor<Pair<Long, Long>> batchProcessorWorker = new BatchProcessWorkerAdaptor<Pair<Long, Long>>()
{
public void process(Pair<Long, Long> range) throws Throwable
{
Long startId = range.getFirst();
Long endId = range.getSecond();
// Bulk-update the old content properties
patchDAO.updateAdmV31ContentProperties(startId, endId);
}
};
BatchProcessor<Pair<Long, Long>> batchProcessor = new BatchProcessor<Pair<Long, Long>>(
"ContentUrlConverter.ADM (" + maxId + ")",
transactionHelper,
batchProcessorWork, threadCount, 1,
applicationEventPublisher, null, 1);
batchProcessor.process(batchProcessorWorker, true);
if (batchProcessor.getTotalErrors() > 0)
{
// Something went wrong. We don't advance the start range so that the patch re-execution will
// start at the start of the range that failed.
throw AlfrescoRuntimeException.create("patch.convertContentUrls.error", batchProcessor.getLastError());
}
// Advance
startId = endId;
// Have we passed the maximum ID?
if (startId > maxId)
{
startId = maxId + 1;
// We're past the max ID that we're interested in
done = Boolean.TRUE;
registryService.addProperty(KEY_ADM_DONE, done);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId));
return true;
}
// Progress
super.reportProgress(maxId, startId);
// Move the start ID on
registryService.addProperty(KEY_ADM_RANGE_START_ID, startId);
// More to do
return false;
}
private boolean applyUrlLifting(final AtomicBoolean running) throws Exception
{
RetryingTransactionCallback<Boolean> callback = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute() throws Throwable
{
return applyUrlLiftingInTxn(running);
}
};
return transactionHelper.doInTransaction(callback, false, true);
}
private boolean applyUrlLiftingInTxn(final AtomicBoolean running) throws Exception
{
// Check the store
if (!contentStore.isWriteSupported())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.readOnly"));
return true;
}
Boolean admDone = (Boolean) registryService.getProperty(KEY_ADM_DONE);
if ((admDone == null || !admDone.booleanValue()))
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.pending"));
return false;
}
// Must we run at all?
Boolean done = (Boolean) registryService.getProperty(KEY_STORE_DONE);
if (done != null && done.booleanValue())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.done"));
return true;
}
final long totalSize = contentStore.getSpaceTotal();
final MutableLong currentSize = new MutableLong(0L);
final MutableInt count = new MutableInt();
count.setValue(0);
ContentUrlHandler handler = new ContentUrlHandler()
{
private int allCount = 0;
public void handle(String contentUrl)
{
if (!running.get())
{
// Either VM shutdown or lock release. Either way, bug out.
throw new VmShutdownListener.VmShutdownException();
}
ContentReader reader = contentStore.getReader(contentUrl);
if (!reader.exists())
{
// Not there any more
return;
}
currentSize.setValue(currentSize.longValue() + reader.getSize());
// Create a savepoint
String savepointName = new Long(System.nanoTime()).toString();
Savepoint savepoint = controlDAO.createSavepoint(savepointName);
try
{
contentDataDAO.createContentUrlOrphaned(contentUrl, null);
controlDAO.releaseSavepoint(savepoint);
count.setValue(count.intValue()+1);
}
catch (DataIntegrityViolationException e)
{
// That's OK, the URL was already managed
controlDAO.rollbackToSavepoint(savepoint);
}
allCount++;
if (allCount % batchSize == 0)
{
if (totalSize < 0)
{
// Report
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.progress", allCount));
}
else
{
ContentUrlConverterPatch.super.reportProgress(totalSize, currentSize.longValue());
}
}
}
};
try
{
contentStore.getUrls(handler);
}
catch (UnsupportedOperationException e)
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.noSupport"));
}
catch (VmShutdownException e)
{
// We didn't manage to complete
return false;
}
// Record the completion
done = Boolean.TRUE;
registryService.addProperty(KEY_STORE_DONE, done);
// Done
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.scheduled", count.intValue(), contentStore));
return true;
}
/**
* Job to initiate the {@link ContentUrlConverterPatch}
*
* @author Derek Hulley
* @since 3.2.1
*/
public static class ContentUrlConverterJob implements Job
{
public ContentUrlConverterJob()
{
}
/**
* Calls the cleaner to do its work
*/
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
// extract the content cleaner to use
Object contentUrlConverterObj = jobData.get("contentUrlConverter");
if (contentUrlConverterObj == null || !(contentUrlConverterObj instanceof ContentUrlConverterPatch))
{
throw new AlfrescoRuntimeException(
"'contentUrlConverter' data must contain valid 'ContentUrlConverter' reference");
}
ContentUrlConverterPatch contentUrlConverter = (ContentUrlConverterPatch) contentUrlConverterObj;
contentUrlConverter.executeViaJob();
}
}
}


@@ -1,298 +0,0 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.alfresco.model.ApplicationModel;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.importer.ImporterBootstrap;
import org.alfresco.service.cmr.admin.PatchException;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.QName;
import org.springframework.context.MessageSource;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Ensures that the <b>invite email templates</b> and <b>notify email templates</b> folders are present.
* <p>
* This uses the bootstrap importer to get the paths to look for. If not present,
* the required structures are created.
* <p>
*
* @author valerysh
*
*/
public class EmailTemplatesInviteAndNotifyFoldersPatch extends AbstractPatch {
public static final String PROPERTY_COMPANY_HOME_CHILDNAME = "spaces.company_home.childname";
public static final String PROPERTY_DICTIONARY_CHILDNAME = "spaces.dictionary.childname";
public static final String PROPERTY_EMAIL_TEMPLATES_FOLDER_CHILDNAME = "spaces.templates.email.childname";
public static final String PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_CHILDNAME = "spaces.templates.email.notify.childname";
public static final String PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_CHILDNAME = "spaces.templates.email.invite1.childname";
private static final String PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_NAME = "spaces.notify_templates.email.name";
private static final String PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_DESCRIPTION = "spaces.notify_templates.email.description";
private static final String PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_NAME = "spaces.invite_templates.email.name";
private static final String PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_DESCRIPTION = "spaces.invite_templates.email.description";
private static final String SAMPLE_NOTIFY_TEMPLATE_NAME = "notify_user_email.ftl.sample";
private static final String INVITE_TEMPLATE_NAME = "invite_user_email.ftl";
private static final String MSG_EMAIL_INVITE_TEMPLATES_FOLDER_EXISTS = "patch.emailInviteTemplatesFolder.result.exists";
private static final String MSG_EMAIL_INVITE_TEMPLATES_FOLDER_CREATED = "patch.emailInviteTemplatesFolder.result.created";
private static final String MSG_EMAIL_NOTIFY_TEMPLATES_FOLDER_EXISTS = "patch.emailNotifyTemplatesFolder.result.exists";
private static final String MSG_EMAIL_NOTIFY_TEMPLATES_FOLDER_CREATED = "patch.emailNotifyTemplatesFolder.result.created";
private static final String PROPERTY_ICON = "space-icon-default";
private ImporterBootstrap importerBootstrap;
private MessageSource messageSource;
protected NodeRef emailNotifyTemplatesFolderNodeRef;
protected NodeRef emailInviteTemplatesFolderNodeRef;
protected Properties configuration;
protected NodeRef emailTemplatesFolderNodeRef;
private String emailTemplatesFolderXPath;
public void setImporterBootstrap(ImporterBootstrap importerBootstrap)
{
this.importerBootstrap = importerBootstrap;
}
public void setMessageSource(MessageSource messageSource)
{
this.messageSource = messageSource;
}
/**
* Ensure that required common properties have been set
*/
protected void checkCommonProperties() throws Exception
{
checkPropertyNotNull(importerBootstrap, "importerBootstrap");
checkPropertyNotNull(messageSource, "messageSource");
}
/**
* Extracts pertinent references and properties that are common to execution
* of this and derived patches.
*/
protected void setUp() throws Exception
{
// get the node store that we must work against
StoreRef storeRef = importerBootstrap.getStoreRef();
if (storeRef == null)
{
throw new PatchException("Bootstrap store has not been set");
}
NodeRef storeRootNodeRef = nodeService.getRootNode(storeRef);
this.configuration = importerBootstrap.getConfiguration();
// get the association names that form the path
String companyHomeChildName = configuration.getProperty(PROPERTY_COMPANY_HOME_CHILDNAME);
if (companyHomeChildName == null || companyHomeChildName.length() == 0)
{
throw new PatchException("Bootstrap property '" + PROPERTY_COMPANY_HOME_CHILDNAME + "' is not present");
}
String dictionaryChildName = configuration.getProperty(PROPERTY_DICTIONARY_CHILDNAME);
if (dictionaryChildName == null || dictionaryChildName.length() == 0)
{
throw new PatchException("Bootstrap property '" + PROPERTY_DICTIONARY_CHILDNAME + "' is not present");
}
String emailTemplatesChildName = configuration.getProperty(PROPERTY_EMAIL_TEMPLATES_FOLDER_CHILDNAME);
if (emailTemplatesChildName == null || emailTemplatesChildName.length() == 0)
{
throw new PatchException("Bootstrap property '" + PROPERTY_EMAIL_TEMPLATES_FOLDER_CHILDNAME + "' is not present");
}
String emailNotifyTemplatesChildName = configuration.getProperty(PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_CHILDNAME);
if (emailNotifyTemplatesChildName == null || emailNotifyTemplatesChildName.length() == 0)
{
throw new PatchException("Bootstrap property '" + PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_CHILDNAME + "' is not present");
}
String emailInviteTemplatesChildName = configuration.getProperty(PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_CHILDNAME);
if (emailInviteTemplatesChildName == null || emailInviteTemplatesChildName.length() == 0)
{
throw new PatchException("Bootstrap property '" + PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_CHILDNAME + "' is not present");
}
StringBuilder sb = new StringBuilder();
sb.append("/").append(companyHomeChildName)
.append("/").append(dictionaryChildName)
.append("/").append(emailTemplatesChildName);
emailTemplatesFolderXPath = sb.toString();
// get the email templates node
List<NodeRef> nodeRefs = searchService.selectNodes(storeRootNodeRef, emailTemplatesFolderXPath, null, namespaceService, false);
if (nodeRefs.size() == 0)
{
throw new PatchException("XPath didn't return any results: \n" +
" root: " + storeRootNodeRef + "\n" +
" xpath: " + emailTemplatesFolderXPath);
}
else if (nodeRefs.size() > 1)
{
throw new PatchException("XPath returned too many results: \n" +
" root: " + storeRootNodeRef + "\n" +
" xpath: " + emailTemplatesFolderXPath + "\n" +
" results: " + nodeRefs);
}
this.emailTemplatesFolderNodeRef = nodeRefs.get(0);
emailNotifyTemplatesFolderNodeRef = searchFolder(emailNotifyTemplatesChildName);
emailInviteTemplatesFolderNodeRef = searchFolder(emailInviteTemplatesChildName);
}
@Override
protected String applyInternal() throws Exception
{
// common properties must be set before we can continue
checkCommonProperties();
if (messageSource == null)
{
throw new PatchException("'messageSource' property has not been set");
}
setUp();
// create the folder if needed - output a message to describe the result
StringBuffer msg = new StringBuffer();
if (emailNotifyTemplatesFolderNodeRef == null)
{
emailNotifyTemplatesFolderNodeRef = createFolderAndMoveTemplate(PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_CHILDNAME,
PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_NAME,
PROPERTY_EMAIL_NOTIFY_TEMPLATES_FOLDER_DESCRIPTION,
SAMPLE_NOTIFY_TEMPLATE_NAME);
msg.append(I18NUtil.getMessage(MSG_EMAIL_NOTIFY_TEMPLATES_FOLDER_CREATED, emailNotifyTemplatesFolderNodeRef));
}
else
{
msg.append(I18NUtil.getMessage(MSG_EMAIL_NOTIFY_TEMPLATES_FOLDER_EXISTS, emailNotifyTemplatesFolderNodeRef));
}
msg.append("; ");
if (emailInviteTemplatesFolderNodeRef == null)
{
emailInviteTemplatesFolderNodeRef = createFolderAndMoveTemplate(PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_CHILDNAME,
PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_NAME,
PROPERTY_EMAIL_INVITE_TEMPLATES_FOLDER_DESCRIPTION,
INVITE_TEMPLATE_NAME);
msg.append(I18NUtil.getMessage(MSG_EMAIL_INVITE_TEMPLATES_FOLDER_CREATED, emailInviteTemplatesFolderNodeRef));
}
else
{
msg.append(I18NUtil.getMessage(MSG_EMAIL_INVITE_TEMPLATES_FOLDER_EXISTS, emailInviteTemplatesFolderNodeRef));
}
return msg.toString();
}
private NodeRef searchFolder(String xpath)
{
List<NodeRef> nodeRefs = searchService.selectNodes(emailTemplatesFolderNodeRef, xpath, null, namespaceService, false);
if (nodeRefs.size() > 1)
{
throw new PatchException("XPath returned too many results: \n" +
" email templates node: " + emailTemplatesFolderNodeRef + "\n" +
" xpath: " + xpath + "\n" +
" results: " + nodeRefs);
}
else if (nodeRefs.size() == 0)
{
// the node does not exist
return null;
}
else
{
return nodeRefs.get(0);
}
}
private NodeRef createFolderAndMoveTemplate(String folderChildName, String folderName, String folderDescription, String templateName)
{
// get required properties
String emailTemplatesChildName = configuration.getProperty(folderChildName);
if (emailTemplatesChildName == null)
{
throw new PatchException("Bootstrap property '" + folderChildName + "' is not present");
}
String emailTemplatesName = messageSource.getMessage(
folderName,
null,
I18NUtil.getLocale());
if (emailTemplatesName == null || emailTemplatesName.length() == 0)
{
throw new PatchException("Bootstrap property '" + folderName + "' is not present");
}
String emailTemplatesDescription = messageSource.getMessage(
folderDescription,
null,
I18NUtil.getLocale());
if (emailTemplatesDescription == null || emailTemplatesDescription.length() == 0)
{
throw new PatchException("Bootstrap property '" + folderDescription + "' is not present");
}
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(7);
properties.put(ContentModel.PROP_NAME, emailTemplatesName);
properties.put(ContentModel.PROP_TITLE, emailTemplatesName);
properties.put(ContentModel.PROP_DESCRIPTION, emailTemplatesDescription);
properties.put(ApplicationModel.PROP_ICON, PROPERTY_ICON);
// create the node
ChildAssociationRef childAssocRef = nodeService.createNode(
emailTemplatesFolderNodeRef,
ContentModel.ASSOC_CONTAINS,
QName.resolveToQName(namespaceService, emailTemplatesChildName),
ContentModel.TYPE_FOLDER,
properties);
NodeRef createdFolderNodeRef = childAssocRef.getChildRef();
// add the required aspects
nodeService.addAspect(createdFolderNodeRef, ApplicationModel.ASPECT_UIFACETS, null);
//move template
String xpath = emailTemplatesFolderXPath + "/cm:" + templateName;
List<NodeRef> templateNodeRefs = searchService.selectNodes(emailTemplatesFolderNodeRef, xpath, null, namespaceService, false);
for (NodeRef templateNodeRef : templateNodeRefs)
{
QName qname = nodeService.getPrimaryParent(templateNodeRef).getQName();
nodeService.moveNode(
templateNodeRef,
createdFolderNodeRef,
ContentModel.ASSOC_CHILDREN,
qname);
}
return createdFolderNodeRef;
}
}


@@ -1,384 +0,0 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.sql.Savepoint;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.alfresco.error.StackTraceUtil;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.repo.batch.BatchProcessWorkProvider;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
import org.alfresco.repo.domain.control.ControlDAO;
import org.alfresco.repo.domain.node.ChildAssocEntity;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.service.cmr.dictionary.AssociationDefinition;
import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryException;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Fixes <a href=https://issues.alfresco.com/jira/browse/ETWOTWO-1133>ETWOTWO-1133</a>.
* Checks all CRC values for <b>alf_child_assoc.child_node_name_crc and alf_child_assoc.qname_crc</b>.
*
* @author Derek Hulley
* @since V2.2SP4
*/
public class FixNameCrcValuesPatch extends AbstractPatch
{
private static final String MSG_SUCCESS = "patch.fixNameCrcValues.result";
private static final String MSG_REWRITTEN = "patch.fixNameCrcValues.fixed";
private static final String MSG_UNABLE_TO_CHANGE = "patch.fixNameCrcValues.unableToChange";
private static final String MSG_FIXING_LOCALNAME = "patch.fixNameCrcValues.fixingLocalname";
private static final String ERR_ASSOCIATION_TYPE_NOT_DEFINED = "patch.fixNameCrcValues.associationTypeNotDefined";
private static final String ERR_ASSOCIATION_TYPE_NOT_CHILD = "patch.fixNameCrcValues.associationTypeNotChild";
private PatchDAO patchDAO;
private QNameDAO qnameDAO;
private ControlDAO controlDAO;
private DictionaryService dictionaryService;
private int batchThreads = 2;
private int batchSize = 1000;
private long batchMaxQueryRange = Long.MAX_VALUE;
private int batchQuerySize = 2000;
private static Log logger = LogFactory.getLog(FixNameCrcValuesPatch.class);
private static Log progress_logger = LogFactory.getLog(PatchExecuter.class);
public FixNameCrcValuesPatch()
{
}
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
/**
* @param qnameDAO resolved QNames
*/
public void setQnameDAO(QNameDAO qnameDAO)
{
this.qnameDAO = qnameDAO;
}
/**
* @param controlDAO used to create Savepoints
*/
public void setControlDAO(ControlDAO controlDAO)
{
this.controlDAO = controlDAO;
}
/**
* @param dictionaryService used to check the child associations for unique checking
*/
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
}
/**
* @param batchThreads the number of threads that will write child association changes
*/
public void setBatchThreads(int batchThreads)
{
this.batchThreads = batchThreads;
}
/**
* @param batchSize the number of child associations that will be modified per transaction
*/
public void setBatchSize(int batchSize)
{
this.batchSize = batchSize;
}
/**
* @param batchMaxQueryRange the largest ID range that the work provider can query for.
* Lower this if the distribution of IDs in alf_child_assoc is not
* uniform and memory problems are encountered.
*/
public void setBatchMaxQueryRange(long batchMaxQueryRange)
{
this.batchMaxQueryRange = batchMaxQueryRange;
}
/**
* @param batchQuerySize the maximum number of results to pull back before handing off to
* the threads (usually threads * batch size)
*/
public void setBatchQuerySize(int batchQuerySize)
{
this.batchQuerySize = batchQuerySize;
}
@Override
protected void checkProperties()
{
super.checkProperties();
checkPropertyNotNull(patchDAO, "patchDAO");
checkPropertyNotNull(qnameDAO, "qnameDAO");
checkPropertyNotNull(controlDAO, "controlDAO");
checkPropertyNotNull(dictionaryService, "dictionaryService");
checkPropertyNotNull(applicationEventPublisher, "applicationEventPublisher");
}
@Override
protected String applyInternal() throws Exception
{
// initialise the helper
FixNameCrcValuesHelper helper = new FixNameCrcValuesHelper();
try
{
String msg = helper.fixCrcValues();
// done
return msg;
}
finally
{
helper.closeWriter();
}
}
private class FixNameCrcValuesHelper
{
private File logFile;
private FileChannel channel;
private Integer assocCount;
private Long minAssocId = 0L;
private Long maxAssocId;
private FixNameCrcValuesHelper() throws IOException
{
// put the log file into a long life temp directory
File tempDir = TempFileProvider.getLongLifeTempDir("patches");
logFile = new File(tempDir, "FixNameCrcValuesPatch.log");
// open the file for appending
RandomAccessFile outputFile = new RandomAccessFile(logFile, "rw");
channel = outputFile.getChannel();
// move to the end of the file
channel.position(channel.size());
// add blank lines to separate this run from any previous output
writeLine("").writeLine("");
writeLine("FixNameCrcValuesPatch executing on " + new Date());
}
private FixNameCrcValuesHelper write(Object obj) throws IOException
{
channel.write(ByteBuffer.wrap(obj.toString().getBytes("UTF-8")));
return this;
}
private FixNameCrcValuesHelper writeLine(Object obj) throws IOException
{
write(obj);
write("\n");
return this;
}
private void closeWriter()
{
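// Best-effort close: the log file is informational only, so any failure here is deliberately swallowed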
try { channel.close(); } catch (Throwable e) {}
}
public String fixCrcValues() throws Exception
{
BatchProcessWorkProvider<Map<String, Object>> workProvider = new BatchProcessWorkProvider<Map<String,Object>>()
{
public synchronized int getTotalEstimatedWorkSize()
{
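// Lazily fetch and cache the total child association count; this is only an estimate used for progress reporting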
if (assocCount == null)
{
assocCount = patchDAO.getChildAssocCount();
}
return assocCount.intValue();
}
public synchronized Collection<Map<String, Object>> getNextWork()
{
if (maxAssocId == null)
{
maxAssocId = patchDAO.getMaxChildAssocId();
}
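// Page through alf_child_assoc by ID range rather than by offset: scale the range by the ratio of
// max ID to row count so that sparse ID distributions still return usefully sized batches
// (the range is ultimately capped by batchMaxQueryRange)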
double total = (double) getTotalEstimatedWorkSize();
long rangeMultiplier = Math.round(maxAssocId.doubleValue() / total);
// Get the next collection
List<Map<String, Object>> results = patchDAO.getChildAssocsForCrcFix(
minAssocId, maxAssocId, rangeMultiplier, batchMaxQueryRange, batchQuerySize);
// Find out what the last ID is
int resultsSize = results.size();
if (resultsSize > 0)
{
Map<String, Object> lastResult = results.get(resultsSize - 1);
Long id = (Long) lastResult.get("id");
minAssocId = id + 1L;
}
// Hand back the results
return results;
}
};
// Set up the batch processor that fans the work out across the configured worker threads
BatchProcessor<Map<String, Object>> batchProcessor = new BatchProcessor<Map<String, Object>>(
"FixNameCrcValuesPatch",
transactionHelper,
workProvider,
batchThreads, batchSize,
applicationEventPublisher,
progress_logger, 1000);
BatchProcessWorker<Map<String, Object>> worker = new BatchProcessWorker<Map<String, Object>>()
{
public String getIdentifier(Map<String, Object> entry)
{
return entry.toString();
}
public void beforeProcess() throws Throwable
{
}
public void process(Map<String, Object> row) throws Throwable
{
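// For each child association row: recompute the cm:name CRC and the QName CRC from the
// current values and rewrite the row only if either stored CRC is stale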
Long assocId = (Long) row.get("id");
Long typeQNameId = (Long) row.get("typeQNameId");
Long qnameNamespaceId = (Long) row.get("qnameNamespaceId");
String qnameLocalName = (String) row.get("qnameLocalName");
Long childNodeNameCrc = (Long) row.get("childNodeNameCrc");
Long qnameCrc = (Long) row.get("qnameCrc");
String childNodeUuid = (String) row.get("childNodeUuid");
String childNodeName = (String) row.get("childNodeName");
// Use the UUID if there is no cm:name
childNodeName = (childNodeName == null) ? childNodeUuid : childNodeName;
// Ensure that we generate a valid QName (see comments on ALF-4529)
if (qnameLocalName == null || qnameLocalName.length() == 0)
{
String qnameLocalNameNew = "fix-" + assocId;
logger.warn(
I18NUtil.getMessage(MSG_FIXING_LOCALNAME, assocId, qnameLocalName, qnameLocalNameNew));
qnameLocalName = qnameLocalNameNew;
}
// Resolve QNames
QName typeQName = qnameDAO.getQName(typeQNameId).getSecond();
String namespace = qnameDAO.getNamespace(qnameNamespaceId).getSecond();
QName qname = QName.createQName(namespace, qnameLocalName);
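// Reuse ChildAssocEntity's own CRC calculation so that the recomputed values match exactly
// what the DAO would store for a newly created association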
ChildAssocEntity entity = new ChildAssocEntity();
entity.setChildNodeNameAll(dictionaryService, typeQName, childNodeName);
entity.setQNameAll(qnameDAO, qname, false);
Long childNodeNameCrcNew = entity.getChildNodeNameCrc();
Long qnameCrcNew = entity.getQnameCrc();
entity = null; // Just checking that we don't misuse it
AssociationDefinition assocDef = dictionaryService.getAssociation(typeQName);
if (assocDef == null)
{
throw new DictionaryException(ERR_ASSOCIATION_TYPE_NOT_DEFINED, typeQName, assocId);
}
else if (!assocDef.isChild())
{
throw new DictionaryException(ERR_ASSOCIATION_TYPE_NOT_CHILD, typeQName, assocId);
}
ChildAssociationDefinition childAssocDef = (ChildAssociationDefinition) assocDef;
boolean requiresNameConstraint = !childAssocDef.getDuplicateChildNamesAllowed();
// Check the CRC for the QName
if (qnameCrcNew.equals(qnameCrc))
{
// Check the CRC values for cm:name
// - value might have stayed the same
// - a negative stored name CRC is acceptable when the unique-name constraint does not need to be enforced
if (childNodeNameCrcNew.equals(childNodeNameCrc) || (childNodeNameCrc < 0 && !requiresNameConstraint))
{
// This child assoc is good
return;
}
}
Savepoint savepoint = null;
try
{
// Being here indicates that the association needs to be updated
savepoint = controlDAO.createSavepoint("FixNameCrcValuesPatch");
patchDAO.updateChildAssocCrc(assocId, childNodeNameCrcNew, qnameCrcNew);
controlDAO.releaseSavepoint(savepoint);
String msg = I18NUtil.getMessage(
MSG_REWRITTEN,
assocId,
childNodeName, childNodeNameCrc, childNodeNameCrcNew,
qname, qnameCrc, qnameCrcNew);
writeLine(msg);
}
catch (Throwable e)
{
if (savepoint != null)
{
controlDAO.rollbackToSavepoint(savepoint);
}
String msg = I18NUtil.getMessage(
MSG_UNABLE_TO_CHANGE,
assocId,
childNodeName, childNodeNameCrc, childNodeNameCrcNew,
qname, qnameCrc, qnameCrcNew,
e.getMessage());
// We just log this and add details to the message file
if (logger.isDebugEnabled())
{
logger.debug(msg, e);
}
else
{
logger.warn(msg);
}
StringBuilder sb = new StringBuilder(1024);
StackTraceUtil.buildStackTrace(msg, e.getStackTrace(), sb, 0);
writeLine(sb.toString());
}
}
public void afterProcess() throws Throwable
{
}
};
int updated = batchProcessor.process(worker, true);
String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile);
return msg;
}
}
}

View File

@@ -1,73 +0,0 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
 * Patch to add the person usage ('cm:sizeCurrent') property to person nodes that are missing it
*
* @author janv
*/
public class PersonUsagePatch extends AbstractPatch
{
private static Log logger = LogFactory.getLog(PersonUsagePatch.class);
/** Success messages. */
private static final String MSG_SUCCESS1 = "patch.personUsagePatch.result1";
private static final String MSG_SUCCESS2 = "patch.personUsagePatch.result2";
private PatchDAO patchDAO;
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
@Override
protected String applyInternal() throws Exception
{
logger.info("Checking for people with missing 'cm:sizeCurrent' property ...");
int count = addPersonSizeCurrentProperty();
String msg = null;
if (count > 0)
{
logger.info("... missing 'cm:sizeCurrent' property added to "+count+" people");
msg = I18NUtil.getMessage(MSG_SUCCESS1, count);
}
else
{
logger.info("... no people were missing the 'cm:sizeCurrent' property");
msg = I18NUtil.getMessage(MSG_SUCCESS2);
}
return msg;
}
private int addPersonSizeCurrentProperty()
{
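// Delegates to the DAO, which adds the 'cm:sizeCurrent' property to any person node that lacks
// it and returns the number of people updated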
return patchDAO.addSizeCurrentProp();
}
}