ACE-1447: Added the 'cm:indexControl' aspect to the surf-config folders and their children so that they are not indexed. Also added an asynchronous patch to apply the aspect to existing content in the background.

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@72733 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Author: Jamal Kaabi-Mofrad
Date:   2014-06-02 04:03:57 +00:00
Parent: 33e6e44d4a
Commit: 5a021b2565
16 changed files with 1113 additions and 45 deletions
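For orientation, the end state for every affected node is indexing switched off through the cm:indexControl aspect. A minimal per-node sketch, assuming a NodeService reference and a target NodeRef; the class and method names here are illustrative only, and it mirrors the addIndexControlAspectIfNotExist helper in SurfConfigFolderPatch further down:

import java.io.Serializable;
import java.util.Collections;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;

public class IndexControlSketch
{
    /** Adds cm:indexControl with cm:isIndexed=false so the indexer skips the node. */
    public static void disableIndexing(NodeService nodeService, NodeRef nodeRef)
    {
        Serializable isIndexed = nodeService.getProperty(nodeRef, ContentModel.PROP_IS_INDEXED);
        // only touch nodes that are currently indexed (or have no explicit value)
        if (isIndexed == null || Boolean.TRUE.equals(isIndexed))
        {
            nodeService.addAspect(nodeRef, ContentModel.ASPECT_INDEX_CONTROL,
                    Collections.singletonMap(ContentModel.PROP_IS_INDEXED, (Serializable) Boolean.FALSE));
        }
    }
}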

View File

@@ -77,6 +77,12 @@
<result property="aclType" column="acl_type" jdbcType="BIGINT" javaType="java.lang.Long"/>
</resultMap>
<resultMap id="result_nodeNameUuidMap" type="java.util.HashMap">
<result property="uuid" column="uuid" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="protocol" column="protocol" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="identifier" column="identifier" jdbcType="VARCHAR" javaType="java.lang.String"/>
</resultMap>
<!-- -->
<!-- Parameter Maps -->
<!-- -->
@@ -128,6 +134,11 @@
<parameter property="maxNodeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>
<parameterMap id="parameter_minMaxNodeId" type="map">
<parameter property="minNodeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="maxNodeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>
<!-- -->
<!-- Selects -->
<!-- -->
@@ -541,6 +552,70 @@
and np.node_id &lt; #{maxNodeId}
</select>
<select id="select_CountNodesWithTypeId" parameterType="Ids" resultType="java.lang.Long" >
select
count(*)
from
alf_node n
where
n.type_qname_id = #{idOne}
</select>
<!-- Select the children of the shared surf-config folder. Mainly, we are interested in:
company_home/sites/surf-config/components
company_home/sites/surf-config/pages
company_home/sites/surf-config/pages/user
company_home/sites/surf-config/pages/user/{userId}
-->
<select id="select_ChildrenOfTheSharedSurfConfigFolder" parameterMap="parameter_minMaxNodeId" resultMap="result_nodeNameUuidMap" >
<![CDATA[
select lev3.child_node_name as name, lev3.child_node_id as node_id, targetn.uuid, targets.protocol, targets.identifier
from
alf_child_assoc lev1
join alf_child_assoc lev2 on (lev2.parent_node_id = lev1.child_node_id)
join alf_child_assoc lev3 on (lev3.parent_node_id = lev2.child_node_id)
join alf_node targetn on (targetn.id = lev3.child_node_id)
join alf_store targets on (targets.id = targetn.store_id)
where
lev1.qname_localname = 'sites'
and lev2.qname_localname = 'surf-config'
and lev3.child_node_id >= #{minNodeId}
and lev3.child_node_id < #{maxNodeId}
UNION
select lev4.child_node_name as name, lev4.child_node_id as node_id, targetn.uuid, targets.protocol, targets.identifier
from
alf_child_assoc lev1
join alf_child_assoc lev2 on (lev2.parent_node_id = lev1.child_node_id)
join alf_child_assoc lev3 on (lev3.parent_node_id = lev2.child_node_id)
join alf_child_assoc lev4 on (lev4.parent_node_id = lev3.child_node_id)
join alf_node targetn on (targetn.id = lev4.child_node_id)
join alf_store targets on (targets.id = targetn.store_id)
where
lev1.qname_localname = 'sites'
and lev2.qname_localname = 'surf-config'
and lev3.qname_localname = 'pages'
and lev4.child_node_id >= #{minNodeId}
and lev4.child_node_id < #{maxNodeId}
UNION
select lev5.child_node_name as name, lev5.child_node_id as node_id, targetn.uuid, targets.protocol, targets.identifier
from
alf_child_assoc lev1
join alf_child_assoc lev2 on (lev2.parent_node_id = lev1.child_node_id)
join alf_child_assoc lev3 on (lev3.parent_node_id = lev2.child_node_id)
join alf_child_assoc lev4 on (lev4.parent_node_id = lev3.child_node_id)
join alf_child_assoc lev5 on (lev5.parent_node_id = lev4.child_node_id)
join alf_node targetn on (targetn.id = lev5.child_node_id)
join alf_store targets on (targets.id = targetn.store_id)
where
lev1.qname_localname = 'sites'
and lev2.qname_localname = 'surf-config'
and lev3.qname_localname = 'pages'
and lev4.qname_localname = 'user'
and lev5.child_node_id >= #{minNodeId}
and lev5.child_node_id < #{maxNodeId}
]]>
</select>
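For orientation, this statement is meant to be executed in node-id windows rather than in one sweep: minNodeId is inclusive, maxNodeId exclusive, and each row returns the uuid/protocol/identifier needed to rebuild a NodeRef (see result_nodeNameUuidMap above). A minimal sketch of that windowed invocation, assuming a MyBatis SqlSessionTemplate field named template; the real consumer is PatchDAOImpl.getChildrenOfTheSharedSurfConfigFolder further down:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.mybatis.spring.SqlSessionTemplate;

public class SharedSurfConfigQuerySketch
{
    private static final String STMT = "alfresco.patch.select_ChildrenOfTheSharedSurfConfigFolder";

    /** Walks the statement in fixed node-id windows so each batch stays small. */
    public static void pageThrough(SqlSessionTemplate template, long maxNodeId, long window)
    {
        for (long minId = 0L; minId <= maxNodeId; minId += window)
        {
            Map<String, Object> params = new HashMap<String, Object>(2);
            params.put("minNodeId", minId);          // inclusive lower bound
            params.put("maxNodeId", minId + window); // exclusive upper bound
            List<Map<String, Object>> rows = template.selectList(STMT, params);
            // each row map carries: name, node_id, uuid, protocol, identifier
        }
    }
}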
<!-- -->
<!-- Updates -->
<!-- -->

View File

@@ -517,3 +517,10 @@ patch.addGroupAuthority.result=\n\Successfully added group authority: {0}
patch.siteAdministrators.description=Adds the 'GROUP_SITE_ADMINISTRATORS' group
patch.alfrescoSearchAdministrators.description=Adds the 'GROUP_ALFRESCO_SEARCH_ADMINISTRATORS' group
patch.surfConfigFolderPatch.description=Adds cm:indexControl aspect to surf-config children
patch.surfConfigFolderPatch.result=Successfully applied ''cm:indexControl'' aspect to {0} sites'' surf-config folders and their children as well as to the shared surf-config folder(s) and its/their children.
patch.genericBootstrap.result.deferred=The patch has been deferred
patch.asynchrounse.checking=Checking for the asynchronous patch ...

View File

@@ -3746,5 +3746,25 @@
</bean>
</property>
</bean>
<!-- Add the cm:indexControl aspect to surf-config folders and their children -->
<bean id="patch.surfConfigFolder" class="org.alfresco.repo.admin.patch.impl.SurfConfigFolderPatch" parent="basePatch" >
<property name="id"><value>patch.surfConfigFolder</value></property>
<property name="description"><value>patch.surfConfigFolderPatch.description</value></property>
<property name="fixesFromSchema"><value>0</value></property>
<property name="fixesToSchema"><value>7004</value></property>
<property name="targetSchema"><value>7005</value></property>
<property name="requiresTransaction"><value>false</value></property>
<property name="applyToTenants"><value>false</value></property>
<!-- We have to ignore it, as this patch will be run by the scheduler in the background. The AsynchronousPatch will take care of registration -->
<property name="ignored"><value>${system.patch.surfConfigFolder.deferred}</value></property>
<property name="patchDAO" ref="patchDAO" />
<property name="nodeDAO" ref="nodeDAO" />
<property name="qnameDAO" ref="qnameDAO" />
<property name="behaviourFilter" ref="policyBehaviourFilter" />
<property name="ruleService" ref="ruleService" />
<property name="jobLockService" ref="jobLockService" />
<!-- Do we defer running the surf-config folder patch? -->
<property name="deferred"><value>${system.patch.surfConfigFolder.deferred}</value></property>
</bean>
</beans>

View File

@@ -1100,3 +1100,10 @@ system.lockTryTimeout=100
system.lockTryTimeout.DictionaryDAOImpl=2000
system.lockTryTimeout.MessageServiceImpl=${system.lockTryTimeout}
system.lockTryTimeout.PolicyComponentImpl=${system.lockTryTimeout}
#
# Do we defer running the surf-config folder patch?
#
system.patch.surfConfigFolder.deferred=true
system.patchSurfConfigFolderTrigger.startDelayMinutes=2

View File

@@ -342,4 +342,31 @@
</property>
</bean>
<!-- surf-config folder patch -->
<bean id="patchSurfConfigFolderJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass">
<value>org.alfresco.repo.admin.patch.AsynchronousPatch$AsynchronousPatchJob</value>
</property>
<property name="jobDataAsMap">
<map>
<entry key="asynchronousPatch">
<ref bean="patch.surfConfigFolder" />
</entry>
</map>
</property>
</bean>
<bean id="patchSurfConfigFolderTrigger" class="org.alfresco.util.TriggerBean">
<property name="jobDetail">
<ref bean="patchSurfConfigFolderJobDetail" />
</property>
<property name="scheduler">
<ref bean="schedulerFactory" />
</property>
<property name="startDelayMinutes">
<value>${system.patchSurfConfigFolderTrigger.startDelayMinutes}</value>
</property>
<property name="repeatCount">
<value>0</value>
</property>
</bean>
</beans>

View File

@@ -23,4 +23,4 @@ version.build=r@scm-revision@-b@build-number@
# Schema number
version.schema=7004
version.schema=7005

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -94,13 +94,15 @@ public abstract class AbstractPatch implements Patch, ApplicationEventPublisher
/** start time * */
long startTime;
private boolean deferred = false;
/** whether the patch must be deferred (not to be executed in bootstrap) or not */
private boolean deferred = false;
// Does the patch require an enclosing transaction?
private boolean requiresTransaction = true;
/** the service to register ourselves with */
private PatchService patchService;
protected PatchService patchService;
/** used to ensure a unique transaction per execution */
protected TransactionService transactionService;
/** Use this helper to ensure that patches can execute even on a read-only system */
@@ -560,11 +562,7 @@ public abstract class AbstractPatch implements Patch, ApplicationEventPublisher
}
/**
* Apply the patch, regardless of the deferred flag. So if the patch has not run due to it being deferred earlier
* then this will run it now. Also ignores the "applied" lock. So the patch can be executed many times.
*
* @return the patch report
* @throws PatchException if the patch failed to be applied
* {@inheritDoc}
*/
public String applyAsync() throws PatchException
{
@@ -744,13 +742,13 @@ public abstract class AbstractPatch implements Patch, ApplicationEventPublisher
this.deferred = deferred;
}
/*
*
/**
* {@inheritDoc}
*/
public boolean isDeferred()
{
return deferred;
}
return this.deferred;
}
private int getReportingInterval(long soFar, long toGo)
{

View File

@@ -0,0 +1,193 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.JobLockService.JobLockRefreshCallback;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Base implementation of the asynchronous patch.
*
* @author Jamal Kaabi-Mofrad
*/
public abstract class AsynchronousPatch extends AbstractPatch
{
private static final Log logger = LogFactory.getLog(AsynchronousPatch.class);
private static final String JOB_NAME = "asynchronousPatch";
private static final String MSG_CHECKING = "patch.asynchrounse.checking";
private static final String MSG_NO_PATCHES_REQUIRED = "patch.executer.no_patches_required";
private static final String MSG_SYSTEM_READ_ONLY = "patch.executer.system_readonly";
private static final String MSG_NOT_EXECUTED = "patch.executer.not_executed";
private static final String MSG_EXECUTED = "patch.executer.executed";
private static final String MSG_FAILED = "patch.executer.failed";
private static final long LOCK_TIME_TO_LIVE = 10000;
private static final long LOCK_REFRESH_TIME = 5000;
private JobLockService jobLockService;
/**
* @param jobLockService the jobLockService to set
*/
public void setJobLockService(JobLockService jobLockService)
{
this.jobLockService = jobLockService;
}
@Override
protected void checkProperties()
{
super.checkProperties();
checkPropertyNotNull(jobLockService, "jobLockService");
}
public void executeAsynchronously()
{
// Take the job lock for this patch
QName lockQName = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, this.getId());
String lockToken = jobLockService.getLock(lockQName, LOCK_TIME_TO_LIVE, 0, 1);
AsyncPatchCallback callback = new AsyncPatchCallback();
jobLockService.refreshLock(lockToken, lockQName, LOCK_REFRESH_TIME, callback);
try
{
if (logger.isDebugEnabled())
{
logger.debug(this.getId() + ": job lock held");
}
applyOutstandingPatch(this);
}
finally
{
if (logger.isTraceEnabled())
{
logger.trace(this.getId() + ": job finished");
}
// Release the locks on the job and stop refreshing
callback.isActive = false;
jobLockService.releaseLock(lockToken, lockQName);
}
}
private void applyOutstandingPatch(Patch patch)
{
// Apply the patch even if we are in read only mode. The system may not
// work safely otherwise.
if (!patchService.validatePatch(patch))
{
logger.warn(I18NUtil.getMessage(MSG_SYSTEM_READ_ONLY));
return;
}
logger.info(I18NUtil.getMessage(MSG_CHECKING));
AppliedPatch appliedPatch = patchService.getPatch(this.getId());
// Don't bother if the patch has already been applied successfully
if (appliedPatch != null && appliedPatch.getSucceeded())
{
logger.info(I18NUtil.getMessage(MSG_NO_PATCHES_REQUIRED));
return;
}
patchService.applyOutstandingPatch(this);
// get the executed patch
appliedPatch = patchService.getPatch(patch.getId());
if (!appliedPatch.getWasExecuted())
{
// the patch was not executed
logger.debug(I18NUtil.getMessage(MSG_NOT_EXECUTED, appliedPatch.getId(), appliedPatch.getReport()));
}
else if (appliedPatch.getSucceeded())
{
logger.info(I18NUtil.getMessage(MSG_EXECUTED, appliedPatch.getId(), appliedPatch.getReport()));
}
else
{
logger.error(I18NUtil.getMessage(MSG_FAILED, appliedPatch.getId(), appliedPatch.getReport()));
throw new AlfrescoRuntimeException("Not all patches could be applied.");
}
}
/**
* Job to initiate the {@link AsynchronousPatch} if it has been deferred
*
* @author Jamal Kaabi-Mofrad
*/
public static class AsynchronousPatchJob implements Job
{
public AsynchronousPatchJob()
{
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
// extract the object to use
Object asyncPatchObj = jobData.get(JOB_NAME);
if (asyncPatchObj == null || !(asyncPatchObj instanceof AsynchronousPatch))
{
throw new AlfrescoRuntimeException(JOB_NAME + " data must contain valid 'AsynchronousPatch' reference");
}
// Job Lock
AsynchronousPatch patch = (AsynchronousPatch) asyncPatchObj;
patch.executeAsynchronously();
}
}
/**
* @author Jamal Kaabi-Mofrad
*/
private class AsyncPatchCallback implements JobLockRefreshCallback
{
public boolean isActive = true;
@Override
public boolean isActive()
{
return isActive;
}
@Override
public void lockReleased()
{
if (logger.isTraceEnabled())
{
logger.trace("lock released");
}
}
}
}
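To make the extension pattern explicit: a concrete subclass only supplies applyInternal(); this base class contributes the job-lock handling, and AsynchronousPatchJob lets a Quartz trigger fire it through the 'asynchronousPatch' job-data entry. A minimal hypothetical sketch (the Spring wiring, such as the basePatch parent, jobLockService and the deferred/ignored flags, is omitted; the real concrete example in this commit is SurfConfigFolderPatch below):

package org.alfresco.repo.admin.patch;

/**
 * Hypothetical asynchronous patch: the long-running work lives in applyInternal(),
 * which executeAsynchronously() runs under a job lock when the scheduled job fires.
 */
public class ExampleAsyncPatch extends AsynchronousPatch
{
    @Override
    protected String applyInternal() throws Exception
    {
        // restart-safe, idempotent work would go here
        return "Example asynchronous patch applied";
    }
}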

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -99,9 +99,26 @@ public interface Patch
*/
public String apply() throws PatchException;
/**
* Apply the patch, regardless of the deferred flag. So if the patch has not
* run due to it being deferred earlier then this will run it now. Also
* ignores the "applied" lock. So the patch can be executed many times.
*
* @return the patch report
* @throws PatchException if the patch failed to be applied
*/
public String applyAsync() throws PatchException;
/**
* Is this patch just ignored - never considered for application
* @return
*/
public boolean isIgnored();
/**
* Indicates whether the patch must be deferred (not to be executed in bootstrap) or not
*
* @return true if the patch must be deferred, false otherwise
*/
public boolean isDeferred();
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -55,6 +55,15 @@ public interface PatchService
* patches could be applied.
*/
public boolean applyOutstandingPatches();
/**
* Apply the specified outstanding patch if it is relevant to the repo.
*
* @param patch the patch object
* @return true if the specified patch and its dependencies were applied, or
* false if the process was terminated before all patches could be applied.
*/
public boolean applyOutstandingPatch(Patch patch);
/**
* Retrieves all applied patches between two specific times.
@@ -74,4 +83,15 @@ public interface PatchService
* @return Returns the patch instance or <tt>null</tt> if one has not been persisted
*/
public AppliedPatch getPatch(String id);
/**
* Does some up-front validation on the specified patch, specifically to see
* if it applies to the current server version and not some future version.
* This is to prevent tampering with versioning information attached to a
* license.
*
* @param patch the patch object
* @return true if validation is successful. Outputs errors and returns false otherwise.
*/
public boolean validatePatch(Patch patch);
}
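Taken together, the two new methods give callers a guarded single-patch entry point. A minimal sketch of the intended call order (hypothetical helper; it is essentially what AsynchronousPatch.applyOutstandingPatch does above):

import org.alfresco.repo.admin.patch.Patch;
import org.alfresco.repo.admin.patch.PatchService;

public class SinglePatchRunner
{
    /**
     * Validates first (rejects a patch aimed at a future schema),
     * then applies just this patch and its dependencies.
     */
    public static boolean runSinglePatch(PatchService patchService, Patch patch)
    {
        if (!patchService.validatePatch(patch))
        {
            return false;
        }
        return patchService.applyOutstandingPatch(patch);
    }
}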

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -38,6 +38,7 @@ import org.alfresco.service.descriptor.DescriptorService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ParameterCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
@@ -102,8 +103,24 @@ public class PatchServiceImpl implements PatchService
private final QName vetoName = QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "PatchServiceImpl");
/**
* {@inheritDoc}
*/
public boolean validatePatches()
{
return validatePatchImpl(patches);
}
/**
* {@inheritDoc}
*/
public boolean validatePatch(Patch patch)
{
ParameterCheck.mandatory("patch", patch);
return validatePatchImpl(Collections.singletonList(patch));
}
private boolean validatePatchImpl(List<Patch> patches)
{
boolean success = true;
int serverSchemaVersion = descriptorService.getServerDescriptor().getSchema();
@@ -115,7 +132,6 @@ public class PatchServiceImpl implements PatchService
.getFixesToSchema(), patch.getTargetSchema()));
success = false;
}
}
if (!success)
{
@@ -140,7 +156,7 @@ public class PatchServiceImpl implements PatchService
Collections.sort(sortedPatches, comparator);
// construct a list of executed patches by ID (also check the date)
Map<String, AppliedPatch> appliedPatchesById = new HashMap<String, AppliedPatch>(23);
Map<String, AppliedPatch> appliedPatchesById = new HashMap<String, AppliedPatch>(250);
List<AppliedPatch> appliedPatches = appliedPatchDAO.getAppliedPatches();
for (final AppliedPatch appliedPatch : appliedPatches)
{
@@ -191,6 +207,55 @@ public class PatchServiceImpl implements PatchService
return success;
}
/**
* {@inheritDoc}
*/
public boolean applyOutstandingPatch(Patch patch)
{
boolean success = true;
try
{
// Disable rules whilst processing the patches
this.ruleService.disableRules();
try
{
Map<String, AppliedPatch> appliedPatchesById = null;
if (patch.getDependsOn().isEmpty())
{
AppliedPatch appliedPatch = appliedPatchDAO.getAppliedPatch(patch.getId());
appliedPatchesById = new HashMap<String, AppliedPatch>(1);
if (appliedPatch != null)
{
appliedPatchesById.put(appliedPatch.getId(), appliedPatch);
}
}
else
{
appliedPatchesById = new HashMap<String, AppliedPatch>(250);
List<AppliedPatch> appliedPatches = appliedPatchDAO.getAppliedPatches();
for (final AppliedPatch appliedPatch : appliedPatches)
{
appliedPatchesById.put(appliedPatch.getId(), appliedPatch);
}
}
// apply the patch
success = applyPatchAndDependencies(patch, appliedPatchesById);
}
finally
{
this.ruleService.enableRules();
}
}
catch (Throwable exception)
{
exception.printStackTrace();
success = false;
}
// done
return success;
}
/**
* Reentrant method that ensures that a patch and all its dependencies get applied.
* The process terminates on the first failure.
@@ -488,7 +553,7 @@ public class PatchServiceImpl implements PatchService
patch.getId(),
I18NUtil.getMessage(patch.getDescription()));
logger.info(msg);
report = patch.apply();
report = (patch.isDeferred()) ? patch.applyAsync() : patch.apply();
state = STATE.APPLIED;
}
catch (PatchException e)

View File

@@ -1,3 +1,21 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;

View File

@@ -0,0 +1,461 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.admin.patch.AsynchronousPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.repo.batch.BatchProcessWorkProvider;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorkerAdaptor;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.site.SiteModel;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
* Patch to add <i>cm:indexControl</i> aspect to sites' surf-config folders and
* their children as well as to the shared surf-config folder(s) and its/their children.
*
* @author Jamal Kaabi-Mofrad
*/
public class SurfConfigFolderPatch extends AsynchronousPatch
{
private static final Log logger = LogFactory.getLog(SurfConfigFolderPatch.class);
private static final Log progress_logger = LogFactory.getLog(PatchExecuter.class);
private static final String MSG_SUCCESS = "patch.surfConfigFolderPatch.result";
// name of the surf config folder
private static final String SURF_CONFIG = "surf-config";
private static final String COMPONENTS = "components"; // cm:surf-config/cm:components
private static final String PAGES = "pages"; // cm:surf-config/cm:pages
private static final String SITE = "site"; // cm:surf-config/cm:pages/cm:site
private static final int SITE_BATCH_THREADS = 2;
private static final int SHARED_SURF_CONFIG_BATCH_THREADS = 2;
private static final int BATCH_SIZE = 1000;
private static final int SHARED_SURF_CONFIG_BATCH_MAX_QUERY_RANGE = 1000;
private static final int SITE_BATCH_MAX_QUERY_RANGE = 1000;
private PatchDAO patchDAO;
private NodeDAO nodeDAO;
private QNameDAO qnameDAO;
private BehaviourFilter behaviourFilter;
private RuleService ruleService;
/**
* @param patchDAO the patchDAO to set
*/
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
/**
* @param nodeDAO the nodeDAO to set
*/
public void setNodeDAO(NodeDAO nodeDAO)
{
this.nodeDAO = nodeDAO;
}
/**
* @param qnameDAO the qnameDAO to set
*/
public void setQnameDAO(QNameDAO qnameDAO)
{
this.qnameDAO = qnameDAO;
}
/**
* @param behaviourFilter the behaviourFilter to set
*/
public void setBehaviourFilter(BehaviourFilter behaviourFilter)
{
this.behaviourFilter = behaviourFilter;
}
/**
* @param ruleService the ruleService to set
*/
public void setRuleService(RuleService ruleService)
{
this.ruleService = ruleService;
}
@Override
protected void checkProperties()
{
super.checkProperties();
checkPropertyNotNull(patchDAO, "patchDAO");
checkPropertyNotNull(nodeDAO, "nodeDAO");
checkPropertyNotNull(qnameDAO, "qnameDAO");
checkPropertyNotNull(ruleService, "ruleService");
checkPropertyNotNull(behaviourFilter, "behaviourFilter");
}
/*
* (non-Javadoc)
* @see org.alfresco.repo.admin.patch.AbstractPatch#applyInternal()
*/
@Override
protected String applyInternal() throws Exception
{
long start = System.currentTimeMillis();
// get user names that will be used within RunAs
final String systemUser = AuthenticationUtil.getSystemUserName();
// Instance to provide raw data to process
BatchProcessWorkProvider<Long> siteWorkProvider = new SiteWorkProvider();
// Instance to handle each item of work
BatchProcessWorker<Long> siteWorker = new BatchProcessWorkerAdaptor<Long>()
{
@Override
public void beforeProcess() throws Throwable
{
// Run as the system user
AuthenticationUtil.setRunAsUser(systemUser);
}
@Override
public void process(Long entry) throws Throwable
{
// Disable all behaviours (including the auditable aspect behaviour)
behaviourFilter.disableBehaviour();
// Disable rules
ruleService.disableRules();
try
{
SurfConfigFolderPatch.this.process(entry);
}
finally
{
ruleService.enableRules();
behaviourFilter.enableBehaviour();
}
}
@Override
public void afterProcess() throws Throwable
{
AuthenticationUtil.clearCurrentSecurityContext();
}
};
BatchProcessor<Long> siteBatchProcessor = new BatchProcessor<Long>("SurfConfigFolderPatch",
transactionService.getRetryingTransactionHelper(), siteWorkProvider, SITE_BATCH_THREADS, BATCH_SIZE, null,
progress_logger, 1000);
int updatedSiteSurfConfig = siteBatchProcessor.process(siteWorker, true);
// shared surf-config folder
// Instance to provide raw data to process
BatchProcessWorkProvider<NodeRef> surfConfigWorkProvider = new SharedSurfConfigWorkProvider();
// Instance to handle each item of work
BatchProcessWorker<NodeRef> surfConfigWorker = new BatchProcessWorkerAdaptor<NodeRef>()
{
@Override
public void beforeProcess() throws Throwable
{
// Run as the system user
AuthenticationUtil.setRunAsUser(systemUser);
}
@Override
public void process(NodeRef entry) throws Throwable
{
// Disable all behaviours (including the auditable aspect behaviour)
behaviourFilter.disableBehaviour();
// Disable rules
ruleService.disableRules();
try
{
if (logger.isDebugEnabled())
{
logger.debug("\tP: Processing [company_home/sites/surf-config/pages/user] children");
}
// add aspect to
// app:company_home/st:sites/cm:surf-config/cm:pages/cm:user/{userName}
addIndexControlAspectIfNotExist(entry);
}
finally
{
ruleService.enableRules();
behaviourFilter.enableBehaviour();
}
}
@Override
public void afterProcess() throws Throwable
{
AuthenticationUtil.clearCurrentSecurityContext();
}
};
BatchProcessor<NodeRef> surfConfigBatchProcessor = new BatchProcessor<NodeRef>("SurfConfigFolderPatch",
transactionService.getRetryingTransactionHelper(), surfConfigWorkProvider, SHARED_SURF_CONFIG_BATCH_THREADS,
BATCH_SIZE, null, progress_logger, 1000);
surfConfigBatchProcessor.process(surfConfigWorker, true);
int numOfSites = updatedSiteSurfConfig / 12;
String msg = I18NUtil.getMessage(MSG_SUCCESS, numOfSites);
long end = System.currentTimeMillis();
logger.info(msg + " in [" + (end - start) + " ms]");
return msg;
}
private void process(long siteId)
{
String siteName = (String) nodeDAO.getNodeProperty(siteId, ContentModel.PROP_NAME);
if (logger.isDebugEnabled())
{
logger.debug("\tP: Processing surf-config folder for the site: [" + siteName + ']');
}
// {siteName}/cm:surf-config/
Pair<Long, ChildAssociationRef> surfConfigPair = nodeDAO.getChildAssoc(siteId, ContentModel.ASSOC_CONTAINS, SURF_CONFIG);
if (surfConfigPair == null)
{
logger.info("WARNING: unable to find surf-config folder for site: [" + siteName + ']');
return;
}
NodeRef surfConfigNodeRef = surfConfigPair.getSecond().getChildRef();
// apply the aspect to the surf-config folder
addIndexControlAspectIfNotExist(surfConfigNodeRef);
// cm:surf-config/cm:components
NodeRef componentsNodeRef = nodeService.getChildByName(surfConfigNodeRef, ContentModel.ASSOC_CONTAINS, COMPONENTS);
if (componentsNodeRef != null)
{
// {siteName}/cm:surf-config/cm:components nodeRef
addIndexControlAspectIfNotExist(componentsNodeRef);
List<ChildAssociationRef> listOfComponents = nodeService.getChildAssocs(componentsNodeRef,
ContentModel.ASSOC_CONTAINS, RegexQNamePattern.MATCH_ALL);
// apply the aspect to all of the children (6 in total)
for (ChildAssociationRef comp : listOfComponents)
{
addIndexControlAspectIfNotExist(comp.getChildRef());
}
}
else
{
logger.info("WARNING: unable to find surf-config/components folder for site: [" + siteName + ']');
}
// cm:surf-config/cm:pages folder
NodeRef pagesNodeRef = nodeService.getChildByName(surfConfigNodeRef, ContentModel.ASSOC_CONTAINS, PAGES);
if (pagesNodeRef == null)
{
logger.info("WARNING: unable to find surf-config/pages folder for site: [" + siteName + ']');
return;
}
// add aspect to cm:pages
addIndexControlAspectIfNotExist(pagesNodeRef);
// cm:surf-config/cm:pages/cm:site folder
NodeRef siteNodeRef = nodeService.getChildByName(pagesNodeRef, ContentModel.ASSOC_CONTAINS, SITE);
if (siteNodeRef == null)
{
logger.info("WARNING: unable to find surf-config/pages/site folder for site: [" + siteName + ']');
return;
}
// add aspect to cm:pages/cm:site folder
addIndexControlAspectIfNotExist(siteNodeRef);
// cm:surf-config/cm:pages/cm:site/{siteName}
NodeRef siteChildNodeRef = nodeService.getChildByName(siteNodeRef, ContentModel.ASSOC_CONTAINS, siteName);
if (siteChildNodeRef == null)
{
logger.info("WARNING: unable to find surf-config/pages/site/" + siteName + " folder for site: [" + siteName + ']');
return;
}
// add aspect to cm:surf-config/cm:pages/cm:site/{siteName}
addIndexControlAspectIfNotExist(siteChildNodeRef);
List<ChildAssociationRef> listOfComponents = nodeService.getChildAssocs(siteChildNodeRef, ContentModel.ASSOC_CONTAINS,
RegexQNamePattern.MATCH_ALL);
// apply the aspect to all of the children
for (ChildAssociationRef comp : listOfComponents)
{
addIndexControlAspectIfNotExist(comp.getChildRef());
}
}
private void addIndexControlAspectIfNotExist(NodeRef nodeRef)
{
// We need to check the property rather than the aspect, as the node
// might have the aspect but not the correct property.
Serializable indexProp = nodeService.getProperty(nodeRef, ContentModel.PROP_IS_INDEXED);
if (indexProp == null || ((Boolean) indexProp))
{
nodeService.addAspect(nodeRef, ContentModel.ASPECT_INDEX_CONTROL,
Collections.singletonMap(ContentModel.PROP_IS_INDEXED, (Serializable) false));
if (logger.isDebugEnabled())
{
logger.debug("\tP: Adding cm:indexControl aspect to node: [" + nodeRef + ']');
}
}
}
/**
* Work provider which performs incremental queries to find site nodes.
*
* @author Jamal Kaabi-Mofrad
*/
private class SiteWorkProvider implements BatchProcessWorkProvider<Long>
{
private long maxId = Long.MAX_VALUE;
private long workCount = Long.MAX_VALUE;
private long currentId = 0L;
private final long siteTypeQNameId;
private SiteWorkProvider()
{
this.siteTypeQNameId = qnameDAO.getQName(SiteModel.TYPE_SITE).getFirst();
}
@Override
public synchronized int getTotalEstimatedWorkSize()
{
if (maxId == Long.MAX_VALUE)
{
maxId = patchDAO.getMaxAdmNodeID();
if (logger.isDebugEnabled())
{
logger.debug("\tQ: Max node id: " + maxId);
}
}
if (workCount == Long.MAX_VALUE)
{
// get the sites count
workCount = patchDAO.getCountNodesWithTypId(SiteModel.TYPE_SITE);
// Each site has 12 children (we care only about surf-config
// itself and its children)
workCount *= 12;
if (logger.isDebugEnabled())
{
logger.debug("\tQ: Work count: " + workCount);
}
}
return (int) workCount;
}
@Override
public synchronized Collection<Long> getNextWork()
{
// Record the site node IDs
final List<Long> siteNodeIDs = new ArrayList<Long>(SITE_BATCH_MAX_QUERY_RANGE);
// Keep querying until we have enough results to give back
int minResults = SITE_BATCH_MAX_QUERY_RANGE / 2;
while (currentId <= maxId && siteNodeIDs.size() < minResults)
{
List<Long> nodeIds = patchDAO.getNodesByTypeQNameId(siteTypeQNameId, currentId, currentId
+ SITE_BATCH_MAX_QUERY_RANGE);
siteNodeIDs.addAll(nodeIds);
// Increment the minimum ID
currentId += SITE_BATCH_MAX_QUERY_RANGE;
}
// Done
return siteNodeIDs;
}
}
/**
* Work provider which performs incremental queries to find shared
* Surf-Config folders and their children.
*
* @author Jamal Kaabi-Mofrad
*/
private class SharedSurfConfigWorkProvider implements BatchProcessWorkProvider<NodeRef>
{
private long maxId = Long.MAX_VALUE;
private long currentId = 0L;
private SharedSurfConfigWorkProvider()
{
}
@Override
public synchronized int getTotalEstimatedWorkSize()
{
if (maxId == Long.MAX_VALUE)
{
maxId = patchDAO.getMaxAdmNodeID();
if (logger.isDebugEnabled())
{
logger.debug("\tQ: Max node id: " + maxId);
}
}
return 0;
}
@Override
public synchronized Collection<NodeRef> getNextWork()
{
// Record the user folder node IDs
final List<NodeRef> folderNodes = new ArrayList<NodeRef>(SHARED_SURF_CONFIG_BATCH_MAX_QUERY_RANGE);
// Keep querying until we have enough results to give back
int minResults = SHARED_SURF_CONFIG_BATCH_MAX_QUERY_RANGE / 2;
while (currentId <= maxId && folderNodes.size() < minResults)
{
List<NodeRef> nodeIds = patchDAO.getChildrenOfTheSharedSurfConfigFolder(currentId, currentId
+ SHARED_SURF_CONFIG_BATCH_MAX_QUERY_RANGE);
folderNodes.addAll(nodeIds);
// Increment the minimum ID
currentId += SHARED_SURF_CONFIG_BATCH_MAX_QUERY_RANGE;
}
// Preload the nodes for quicker access
nodeDAO.cacheNodes(folderNodes);
// Done
return folderNodes;
}
}
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -280,5 +280,27 @@ public interface PatchDAO
* @return
*/
public List<Long> getNodesByContentPropertyMimetypeId(Long mimetypeId, Long minNodeId, Long maxNodeId);
/**
* Gets the total number of nodes which match the given Type QName.
*
* @param typeQName the qname to search for
* @return count of nodes that match the typeQName
*/
public long getCountNodesWithTypId(QName typeQName);
/**
* Finds the children of the shared surf-config folder (for all tenants):
* <ul>
* <li>company_home/sites/surf-config/components</li>
* <li>company_home/sites/surf-config/pages</li>
* <li>company_home/sites/surf-config/pages/user</li>
* <li>company_home/sites/surf-config/pages/user/{userId}</li>
* </ul>
* @param minNodeId - min node id in the result set - inclusive
* @param maxNodeId - max node id in the result set - exclusive
* @return list of children nodeRefs
*/
public List<NodeRef> getChildrenOfTheSharedSurfConfigFolder(Long minNodeId, Long maxNodeId);
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -108,6 +108,9 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl
private static final String SELECT_NODES_BY_TYPE_URI = "alfresco.patch.select_NodesByTypeUriId";
private static final String SELECT_NODES_BY_ASPECT_QNAME = "alfresco.patch.select_NodesByAspectQName";
private static final String SELECT_NODES_BY_CONTENT_MIMETYPE = "alfresco.patch.select_NodesByContentMimetype";
private static final String SELECT_COUNT_NODES_WITH_TYPE_ID = "alfresco.patch.select_CountNodesWithTypeId";
private static final String SELECT_CHILDREN_OF_THE_SHARED_SURFCONFIG_FOLDER = "alfresco.patch.select_ChildrenOfTheSharedSurfConfigFolder";
private LocaleDAO localeDAO;
@@ -703,4 +706,52 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl
params.put("maxNodeId", maxNodeId);
return (List<Long>) template.selectList(SELECT_NODES_BY_CONTENT_MIMETYPE, params);
}
@Override
public long getCountNodesWithTypId(QName typeQName)
{
// Resolve the QName
Pair<Long, QName> qnameId = qnameDAO.getQName(typeQName);
if (qnameId == null)
{
return 0L;
}
IdsEntity params = new IdsEntity();
params.setIdOne(qnameId.getFirst());
Long count = (Long) template.selectOne(SELECT_COUNT_NODES_WITH_TYPE_ID, params);
if (count == null)
{
return 0L;
}
else
{
return count;
}
}
@Override
public List<NodeRef> getChildrenOfTheSharedSurfConfigFolder(Long minNodeId, Long maxNodeId)
{
Map<String, Object> params = new HashMap<String, Object>(2);
params.put("minNodeId", minNodeId);
params.put("maxNodeId", maxNodeId);
final List<NodeRef> results = new ArrayList<NodeRef>(1000);
ResultHandler resultHandler = new ResultHandler()
{
@SuppressWarnings("unchecked")
public void handleResult(ResultContext context)
{
Map<String, Object> row = (Map<String, Object>) context.getResultObject();
String protocol = (String) row.get("protocol");
String identifier = (String) row.get("identifier");
String uuid = (String) row.get("uuid");
NodeRef nodeRef = new NodeRef(new StoreRef(protocol, identifier), uuid);
results.add(nodeRef);
}
};
template.select(SELECT_CHILDREN_OF_THE_SHARED_SURFCONFIG_FOLDER, params, resultHandler);
return results;
}
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -18,13 +18,18 @@
*/
package org.alfresco.service.cmr.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.namespace.QName;
/**
@@ -70,34 +75,61 @@ public class FileFolderUtil
* @param parentBehavioursToDisable
* @return
*/
public static FileInfo makeFolders(FileFolderService service,
NodeRef parentNodeRef, List<String> pathElements,
QName folderTypeQName,
BehaviourFilter behaviourFilter,
Set<QName> parentBehavioursToDisable)
public static FileInfo makeFolders(FileFolderService service, NodeRef parentNodeRef, List<String> pathElements,
QName folderTypeQName, BehaviourFilter behaviourFilter, Set<QName> parentBehavioursToDisable)
{
if (pathElements.size() == 0)
{
throw new IllegalArgumentException("Path element list is empty");
}
// make sure that the folder is correct
boolean isFolder = service.getType(folderTypeQName) == FileFolderServiceType.FOLDER;
if (!isFolder)
{
throw new IllegalArgumentException(
"Type is invalid to make folders with: " + folderTypeQName);
}
validate(pathElements, service, folderTypeQName);
List<PathElementDetails> list = new ArrayList<>(pathElements.size());
for (String pathElement : pathElements)
{
list.add(new PathElementDetails(pathElement, null));
}
FileInfo fileInfo = makeFolders(service, null, parentNodeRef, list, folderTypeQName, behaviourFilter,
parentBehavioursToDisable);
// Should we check the type?
return fileInfo;
}
/**
* Checks for the presence of, and creates as necessary, the folder
* structure in the provided paths with the following options:
* <ul>
* <li>Option to disable parent behaviour(s) when creating sub-folder.</li>
* <li>Each folder has the option to have its own set of aspects</li>
*</ul>
*
* @param service the FileFolderService object
* @param nodeService the NodeService object
* @param parentNodeRef the node under which the path will be created
* @param pathElementDetails the list of folder hierarchy where each folder
* can have its own set of aspects - may not be empty
* @param folderTypeQName the type of node to create. This must be a valid
* subtype of {@link org.alfresco.model.ContentModel#TYPE_FOLDER the folder type}
* @param behaviourFilter the BehaviourFilter object
* @param parentBehavioursToDisable the set of behaviours that must be
* disabled
* @return Returns the {@code FileInfo} of the last folder in the path.
*/
public static FileInfo makeFolders(FileFolderService service, NodeService nodeService, NodeRef parentNodeRef,
List<PathElementDetails> pathElementDetails, QName folderTypeQName, BehaviourFilter behaviourFilter,
Set<QName> parentBehavioursToDisable)
{
validate(pathElementDetails, service, folderTypeQName);
NodeRef currentParentRef = parentNodeRef;
// just loop and create if necessary
for (String pathElement : pathElements)
for (PathElementDetails pathElement : pathElementDetails)
{
// does it exist?
// Navigation should not check permissions
NodeRef nodeRef = AuthenticationUtil.runAs(new SearchAsSystem(
service, currentParentRef, pathElement), AuthenticationUtil
.getSystemUserName());
NodeRef nodeRef = AuthenticationUtil.runAs(
new SearchAsSystem(service, currentParentRef, pathElement.getFolderName()),
AuthenticationUtil.getSystemUserName());
if (nodeRef == null)
{
@@ -108,15 +140,23 @@ public class FileFolderUtil
behaviourFilter.disableBehaviour(currentParentRef, parentBehaviourToDisable);
}
}
try
{
// not present - make it
// If this uses the public service it will check create
// permissions
FileInfo createdFileInfo = service.create(currentParentRef,
pathElement, folderTypeQName);
FileInfo createdFileInfo = service.create(currentParentRef, pathElement.getFolderName(), folderTypeQName);
currentParentRef = createdFileInfo.getNodeRef();
Map<QName, Map<QName, Serializable>> requireddAspects = pathElement.getAspects();
if (requireddAspects != null && nodeService != null)
{
for (QName aspect : requireddAspects.keySet())
{
nodeService.addAspect(currentParentRef, aspect, requireddAspects.get(aspect));
}
}
}
finally
{
@@ -144,6 +184,21 @@ public class FileFolderUtil
return fileInfo;
}
private static <T> void validate(List<T> pathElements, FileFolderService service, QName folderTypeQName)
{
if (pathElements.size() == 0)
{
throw new IllegalArgumentException("Path element list is empty");
}
// make sure that the folder is correct
boolean isFolder = service.getType(folderTypeQName) == FileFolderServiceType.FOLDER;
if (!isFolder)
{
throw new IllegalArgumentException("Type is invalid to make folders with: " + folderTypeQName);
}
}
private static class SearchAsSystem implements RunAsWork<NodeRef>
{
FileFolderService service;
@@ -163,5 +218,37 @@ public class FileFolderUtil
}
}
/**
* A simple POJO to hold information about the folder which will be created.
*
* @author Jamal Kaabi-Mofrad
*/
public static class PathElementDetails
{
private final String folderName;
private final Map<QName, Map<QName, Serializable>> aspects;
public PathElementDetails(String folderName, Map<QName, Map<QName, Serializable>> aspects)
{
this.folderName = folderName;
// Guard against a null map: the List<String> overload of makeFolders passes null aspects
this.aspects = (aspects == null) ? Collections.<QName, Map<QName, Serializable>> emptyMap()
: Collections.unmodifiableMap(aspects);
}
/**
* @return the folderName
*/
public String getFolderName()
{
return this.folderName;
}
/**
* @return the aspects
*/
public Map<QName, Map<QName, Serializable>> getAspects()
{
return this.aspects;
}
}
}
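A minimal usage sketch of the new overload (hypothetical class and variable names): it shows how a caller could have a freshly created folder chain pick up cm:indexControl at creation time instead of relying on the patch to retrofit it:

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileFolderUtil;
import org.alfresco.service.cmr.model.FileFolderUtil.PathElementDetails;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.namespace.QName;

public class SurfConfigFolderCreationSketch
{
    public static FileInfo createUnindexedFolders(FileFolderService fileFolderService, NodeService nodeService,
            BehaviourFilter behaviourFilter, NodeRef parentFolder)
    {
        // cm:indexControl properties applied to every folder in the chain
        Map<QName, Serializable> indexProps = Collections.singletonMap(
                ContentModel.PROP_IS_INDEXED, (Serializable) Boolean.FALSE);
        Map<QName, Map<QName, Serializable>> aspects = Collections.singletonMap(
                ContentModel.ASPECT_INDEX_CONTROL, indexProps);

        List<PathElementDetails> path = new ArrayList<PathElementDetails>();
        path.add(new PathElementDetails("surf-config", aspects));
        path.add(new PathElementDetails("pages", aspects));

        // no parent behaviours are disabled in this sketch
        return FileFolderUtil.makeFolders(fileFolderService, nodeService, parentFolder, path,
                ContentModel.TYPE_FOLDER, behaviourFilter, Collections.<QName>emptySet());
    }
}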